diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 92b75accdcdb5..dd51082981ffc 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -37,3 +37,4 @@ BWC_VERSION:
   - "1.2.4"
   - "1.2.5"
   - "1.3.0"
+  - "1.4.0"
\ No newline at end of file
diff --git a/.github/workflows/dco.yml b/.github/workflows/dco.yml
deleted file mode 100644
index cf30ea89dcbcb..0000000000000
--- a/.github/workflows/dco.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-name: Developer Certificate of Origin Check
-
-on: [pull_request]
-
-jobs:
-  check:
-    runs-on: ubuntu-latest
-
-    steps:
-    - name: Get PR Commits
-      id: 'get-pr-commits'
-      uses: tim-actions/get-pr-commits@v1.1.0
-      with:
-        token: ${{ secrets.GITHUB_TOKEN }}
-    - name: DCO Check
-      uses: tim-actions/dco@v1.1.0
-      with:
-        commits: ${{ steps.get-pr-commits.outputs.commits }}
diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md
index 839144d06b6ec..58444441e3258 100644
--- a/DEVELOPER_GUIDE.md
+++ b/DEVELOPER_GUIDE.md
@@ -33,8 +33,6 @@
     - [runtimeOnly](#runtimeonly)
     - [compileOnly](#compileonly)
     - [testImplementation](#testimplementation)
-  - [Gradle Plugins](#gradle-plugins)
-    - [Distribution Download Plugin](#distribution-download-plugin)
 - [Misc](#misc)
   - [git-secrets](#git-secrets)
     - [Installation](#installation)
@@ -63,7 +61,19 @@ Fork [opensearch-project/OpenSearch](https://github.com/opensearch-project/OpenS
 
 #### JDK 11
 
-OpenSearch builds using Java 11 at a minimum. This means you must have a JDK 11 installed with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK 11 installation, e.g. `JAVA_HOME=/usr/lib/jvm/jdk-11`.
+OpenSearch builds using Java 11 at a minimum, using the Adoptium distribution. This means you must have a JDK 11 installed with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK 11 installation, e.g. `JAVA_HOME=/usr/lib/jvm/jdk-11`. This is configured in [buildSrc/build.gradle](buildSrc/build.gradle) and [distribution/tools/java-version-checker/build.gradle](distribution/tools/java-version-checker/build.gradle).
+
+```
+allprojects {
+  targetCompatibility = JavaVersion.VERSION_11
+  sourceCompatibility = JavaVersion.VERSION_11
+}
+```
+
+```
+sourceCompatibility = JavaVersion.VERSION_11
+targetCompatibility = JavaVersion.VERSION_11
+```
 
 Download Java 11 from [here](https://adoptium.net/releases.html?variant=openjdk11).
 
@@ -71,9 +81,18 @@ Download Java 11 from [here](https://adoptium.net/releases.html?variant=openjdk1
 
 To run the full suite of tests, download and install [JDK 14](https://jdk.java.net/archive/) and set `JAVA11_HOME`, and `JAVA14_HOME`. They are required by the [backwards compatibility test](./TESTING.md#testing-backwards-compatibility).
 
-#### Runtime JDK
+#### JDK 17
 
-By default, the test tasks use bundled JDK runtime, configured in `buildSrc/version.properties` and set to JDK 17 (LTS). Other kind of test tasks (integration, cluster, ... ) use the same runtime as `JAVA_HOME`. However, the build supports compiling with JDK 11 and testing on a different version of JDK runtime. To do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of another JDK installation, e.g. `RUNTIME_JAVA_HOME=/usr/lib/jvm/jdk-14`. Alternatively, the runtime JDK version could be provided as the command line argument, using combination of `runtime.java=` property and `JAVA_HOME` environment variable, for example `./gradlew -Druntime.java=17 ...` (in this case, the tooling expects `JAVA17_HOME` environment variable to be set).
+By default, the test tasks use bundled JDK runtime, configured in [buildSrc/version.properties](buildSrc/version.properties), and set to JDK 17 (LTS).
+
+```
+bundled_jdk_vendor = adoptium
+bundled_jdk = 17.0.2+8
+```
+
+#### Custom Runtime JDK
+
+Other kinds of test tasks (integration, cluster, etc.) use the same runtime as `JAVA_HOME`. However, the build also supports compiling with one version of JDK and testing on a different version. To do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of another JDK installation, e.g. `RUNTIME_JAVA_HOME=/usr/lib/jvm/jdk-14`. Alternatively, the runtime JDK version can be provided as a command line argument, using a combination of the `runtime.java=` property and the `JAVA_HOME` environment variable, for example `./gradlew -Druntime.java=17 ...` (in this case, the tooling expects the `JAVA17_HOME` environment variable to be set).
 
 #### Windows
 
@@ -342,15 +361,6 @@ somehow. OpenSearch plugins use this configuration to include dependencies that
 
 Code that is on the classpath for compiling tests that are part of this project but not production code. The canonical example of this is `junit`.
 
-### Gradle Plugins
-
-#### Distribution Download Plugin
-
-The Distribution Download plugin downloads the latest version of OpenSearch by default, and supports overriding this behavior by setting `customDistributionUrl`.
-```
-./gradlew integTest -DcustomDistributionUrl="https://ci.opensearch.org/ci/dbc/bundle-build/1.2.0/1127/linux/x64/dist/opensearch-1.2.0-linux-x64.tar.gz"
-```
-
 ## Misc
 
 ### git-secrets
diff --git a/TESTING.md b/TESTING.md
index 5571b7c7a4aaf..4a2a786469b67 100644
--- a/TESTING.md
+++ b/TESTING.md
@@ -245,7 +245,7 @@ The YAML REST tests support all the options provided by the randomized runner, p
 
 - `tests.rest.suite`: comma separated paths of the test suites to be run (by default loaded from /rest-api-spec/test). It is possible to run only a subset of the tests providing a sub-folder or even a single yaml file (the default /rest-api-spec/test prefix is optional when files are loaded from classpath) e.g. `-Dtests.rest.suite=index,get,create/10_with_id`
 
-- `tests.rest.blacklist`: comma separated globs that identify tests that are blacklisted and need to be skipped e.g. `-Dtests.rest.blacklist=index/**/Index document,get/10_basic/**`
+- `tests.rest.blacklist`: comma separated globs that identify tests that are denylisted and need to be skipped e.g. `-Dtests.rest.blacklist=index/**/Index document,get/10_basic/**`
 
 Java REST tests can be run with the "javaRestTest" task.
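The two runtime-JDK override mechanisms described in the guide text above can be combined with an ordinary Gradle invocation; the sketch below uses only the environment variables and property named in the guide, while the `test` task target and the JDK paths are illustrative rather than prescribed by this change.

```
# Compile with the JDK from JAVA_HOME (JDK 11) but run the test tasks on the JDK at RUNTIME_JAVA_HOME
RUNTIME_JAVA_HOME=/usr/lib/jvm/jdk-14 ./gradlew test

# Or select the runtime by version number; the tooling then expects JAVA17_HOME to point at a JDK 17 installation
JAVA17_HOME=/usr/lib/jvm/jdk-17 ./gradlew -Druntime.java=17 test
```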
diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle
index 1a6846e9eecb0..faaeb33d80ff7 100644
--- a/benchmarks/build.gradle
+++ b/benchmarks/build.gradle
@@ -49,7 +49,7 @@ dependencies {
   annotationProcessor "org.openjdk.jmh:jmh-generator-annprocess:$versions.jmh"
   // Dependencies of JMH
   runtimeOnly 'net.sf.jopt-simple:jopt-simple:5.0.4'
-  runtimeOnly 'org.apache.commons:commons-math3:3.2'
+  runtimeOnly 'org.apache.commons:commons-math3:3.6.1'
 }
 
 // enable the JMH's BenchmarkProcessor to generate the final benchmark classes
diff --git a/build.gradle b/build.gradle
index c12f7ece4d39c..374bfb3ccfae3 100644
--- a/build.gradle
+++ b/build.gradle
@@ -244,7 +244,7 @@ allprojects {
       compile.options.compilerArgs << '-Xlint:opens'
       compile.options.compilerArgs << '-Xlint:overloads'
      compile.options.compilerArgs << '-Xlint:overrides'
-      compile.options.compilerArgs << '-Xlint:processing'
+      compile.options.compilerArgs << '-Xlint:-processing'
       compile.options.compilerArgs << '-Xlint:rawtypes'
       compile.options.compilerArgs << '-Xlint:removal'
       compile.options.compilerArgs << '-Xlint:requires-automatic'
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index 6565dc1646ac0..ff79cc5df0df0 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -105,7 +105,7 @@ dependencies {
   api 'commons-codec:commons-codec:1.15'
   api 'org.apache.commons:commons-compress:1.21'
   api 'org.apache.ant:ant:1.10.12'
-  api 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3'
+  api 'com.netflix.nebula:gradle-extra-configurations-plugin:7.0.0'
   api 'com.netflix.nebula:nebula-publishing-plugin:4.4.4'
   api 'com.netflix.nebula:gradle-info-plugin:7.1.3'
   api 'org.apache.rat:apache-rat:0.13'
@@ -124,7 +124,7 @@ dependencies {
   testFixturesApi gradleTestKit()
   testImplementation 'com.github.tomakehurst:wiremock-jre8-standalone:2.23.2'
   testImplementation "org.mockito:mockito-core:${props.getProperty('mockito')}"
-  integTestImplementation('org.spockframework:spock-core:2.0-groovy-3.0') {
+  integTestImplementation('org.spockframework:spock-core:2.1-groovy-3.0') {
     exclude module: "groovy"
   }
 }
@@ -158,8 +158,8 @@ if (project != rootProject) {
   apply plugin: 'opensearch.publish'
 
   allprojects {
-    targetCompatibility = 11
-    sourceCompatibility = 11
+    targetCompatibility = JavaVersion.VERSION_11
+    sourceCompatibility = JavaVersion.VERSION_11
   }
 
   // groovydoc succeeds, but has some weird internal exception...
diff --git a/buildSrc/src/main/groovy/org/opensearch/gradle/ResolveAllDependencies.java b/buildSrc/src/main/groovy/org/opensearch/gradle/ResolveAllDependencies.java index 2c3c8bf2629bc..63ad25a977b68 100644 --- a/buildSrc/src/main/groovy/org/opensearch/gradle/ResolveAllDependencies.java +++ b/buildSrc/src/main/groovy/org/opensearch/gradle/ResolveAllDependencies.java @@ -55,7 +55,7 @@ static boolean canBeResolved(Configuration configuration) { return false; } if (configuration instanceof org.gradle.internal.deprecation.DeprecatableConfiguration) { - var deprecatableConfiguration = (DeprecatableConfiguration) configuration; + DeprecatableConfiguration deprecatableConfiguration = (DeprecatableConfiguration) configuration; if (deprecatableConfiguration.canSafelyBeResolved() == false) { return false; } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java index 8b5e81bc2ef07..843a7f7d2716d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java @@ -105,6 +105,7 @@ public void apply(Project project) { setupResolutionsContainer(project); setupDistributionContainer(project, dockerSupport); + setupDownloadServiceRepo(project); project.afterEvaluate(this::setupDistributions); } @@ -152,7 +153,6 @@ void setupDistributions(Project project) { dependencies.add(distribution.getExtracted().getName(), distributionDependency.getExtractedNotation()); } } - setupDownloadServiceRepo(project); } private DistributionDependency resolveDependencyNotation(Project p, OpenSearchDistribution distribution) { @@ -195,22 +195,15 @@ private static void setupDownloadServiceRepo(Project project) { if (project.getRepositories().findByName(DOWNLOAD_REPO_NAME) != null) { return; } - Object customDistributionUrl = project.findProperty("customDistributionUrl"); - // checks if custom Distribution Url has been passed by user from plugins - if (customDistributionUrl != null) { - addIvyRepo(project, DOWNLOAD_REPO_NAME, customDistributionUrl.toString(), FAKE_IVY_GROUP, ""); - addIvyRepo(project, SNAPSHOT_REPO_NAME, customDistributionUrl.toString(), FAKE_SNAPSHOT_IVY_GROUP, ""); - } else { - addIvyRepo( - project, - DOWNLOAD_REPO_NAME, - "https://artifacts.opensearch.org", - FAKE_IVY_GROUP, - "/releases" + RELEASE_PATTERN_LAYOUT, - "/release-candidates" + RELEASE_PATTERN_LAYOUT - ); - addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://artifacts.opensearch.org", FAKE_SNAPSHOT_IVY_GROUP, SNAPSHOT_PATTERN_LAYOUT); - } + addIvyRepo( + project, + DOWNLOAD_REPO_NAME, + "https://artifacts.opensearch.org", + FAKE_IVY_GROUP, + "/releases" + RELEASE_PATTERN_LAYOUT, + "/release-candidates" + RELEASE_PATTERN_LAYOUT + ); + addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://artifacts.opensearch.org", FAKE_SNAPSHOT_IVY_GROUP, SNAPSHOT_PATTERN_LAYOUT); addIvyRepo2(project, DOWNLOAD_REPO_NAME_ES, "https://artifacts-no-kpi.elastic.co", FAKE_IVY_GROUP_ES); addIvyRepo2(project, SNAPSHOT_REPO_NAME_ES, "https://snapshots-no-kpi.elastic.co", FAKE_SNAPSHOT_IVY_GROUP_ES); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/LoggedExec.java b/buildSrc/src/main/java/org/opensearch/gradle/LoggedExec.java index 2831108b94452..0512ed72f5e47 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/LoggedExec.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/LoggedExec.java @@ -64,7 +64,6 @@ /** * A wrapper around gradle's Exec 
task to capture output and log on error. */ -@SuppressWarnings("unchecked") public class LoggedExec extends Exec implements FileSystemOperationsAware { private static final Logger LOGGER = Logging.getLogger(LoggedExec.class); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchJavaPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchJavaPlugin.java index 2750503334baf..c701c47f9e68c 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchJavaPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchJavaPlugin.java @@ -63,8 +63,8 @@ import org.gradle.language.base.plugins.LifecycleBasePlugin; import java.io.File; +import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.function.Consumer; import static org.opensearch.gradle.util.Util.toStringable; @@ -173,7 +173,10 @@ public static void configureCompile(Project project) { // workaround for https://github.com/gradle/gradle/issues/14141 compileTask.getConventionMapping().map("sourceCompatibility", () -> java.getSourceCompatibility().toString()); compileTask.getConventionMapping().map("targetCompatibility", () -> java.getTargetCompatibility().toString()); - compileOptions.getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask)); + // The '--release is available from JDK-9 and above + if (BuildParams.getRuntimeJavaVersion().compareTo(JavaVersion.VERSION_1_8) > 0) { + compileOptions.getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask)); + } }); // also apply release flag to groovy, which is used in build-tools project.getTasks().withType(GroovyCompile.class).configureEach(compileTask -> { @@ -212,10 +215,12 @@ static void configureJars(Project project) { public void execute(Task task) { // this doFirst is added before the info plugin, therefore it will run // after the doFirst added by the info plugin, and we can override attributes - jarTask.getManifest() - .attributes( - Map.of("Build-Date", BuildParams.getBuildDate(), "Build-Java-Version", BuildParams.getGradleJavaVersion()) - ); + jarTask.getManifest().attributes(new HashMap() { + { + put("Build-Date", BuildParams.getBuildDate()); + put("Build-Java-Version", BuildParams.getGradleJavaVersion()); + } + }); } }); }); @@ -265,7 +270,9 @@ private static void configureJavadoc(Project project) { * that the default will change to html5 in the future. 
*/ CoreJavadocOptions javadocOptions = (CoreJavadocOptions) javadoc.getOptions(); - javadocOptions.addBooleanOption("html5", true); + if (BuildParams.getRuntimeJavaVersion().compareTo(JavaVersion.VERSION_1_8) > 0) { + javadocOptions.addBooleanOption("html5", true); + } }); TaskProvider javadoc = project.getTasks().withType(Javadoc.class).named("javadoc"); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java index 8a972bfa37e78..9d6e78014916d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java @@ -49,6 +49,7 @@ import org.gradle.api.tasks.testing.Test; import java.io.File; +import java.util.HashMap; import java.util.Map; import static org.opensearch.gradle.util.FileUtils.mkdirs; @@ -95,7 +96,7 @@ public void apply(Project project) { // We specifically use an anonymous inner class here because lambda task actions break Gradle cacheability // See: https://docs.gradle.org/current/userguide/more_about_tasks.html#sec:how_does_it_work - test.doFirst(new Action<>() { + test.doFirst(new Action() { @Override public void execute(Task t) { mkdirs(testOutputDir); @@ -137,20 +138,16 @@ public void execute(Task t) { test.jvmArgs("-ea", "-esa"); } - Map sysprops = Map.of( - "java.awt.headless", - "true", - "tests.gradle", - "true", - "tests.artifact", - project.getName(), - "tests.task", - test.getPath(), - "tests.security.manager", - "true", - "jna.nosys", - "true" - ); + Map sysprops = new HashMap() { + { + put("java.awt.headless", "true"); + put("tests.gradle", "true"); + put("tests.artifact", project.getName()); + put("tests.task", test.getPath()); + put("tests.security.manager", "true"); + put("jna.nosys", "true"); + } + }; test.systemProperties(sysprops); // ignore changing test seed when build is passed -Dignore.tests.seed for cacheability experimentation diff --git a/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java index d5143f43ab70e..16e5cba4b5b23 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java @@ -56,6 +56,7 @@ public void apply(Project project) { .resolve(".gradle") .resolve("reaper") .resolve("build-" + ProcessHandle.current().pid()); + ReaperService service = project.getExtensions() .create("reaper", ReaperService.class, project, project.getBuildDir().toPath(), inputDir); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/docker/DockerSupportService.java b/buildSrc/src/main/java/org/opensearch/gradle/docker/DockerSupportService.java index 2cb977117858b..2eb2852e3e55e 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/docker/DockerSupportService.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/docker/DockerSupportService.java @@ -49,6 +49,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -280,7 +281,7 @@ static Map parseOsRelease(final List osReleaseLines) { */ private Optional getDockerPath() { // Check if the Docker binary exists - return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); + return Arrays.asList(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); } /** 
@@ -291,7 +292,7 @@ private Optional getDockerPath() { */ private Optional getDockerComposePath() { // Check if the Docker binary exists - return List.of(DOCKER_COMPOSE_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); + return Arrays.asList(DOCKER_COMPOSE_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); } private void throwDockerRequiredException(final String message) { diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java index 13aa2e43af313..11270e5c9a51d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java @@ -109,7 +109,7 @@ public void apply(Project project) { }); TaskProvider fetchLatestTaskProvider = tasks.register("fetchLatest", LoggedExec.class, fetchLatest -> { - var gitFetchLatest = project.getProviders() + Provider gitFetchLatest = project.getProviders() .systemProperty("tests.bwc.git_fetch_latest") .forUseAtConfigurationTime() .orElse("true") @@ -122,7 +122,7 @@ public void apply(Project project) { } throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + fetchProp + "]"); }); - fetchLatest.onlyIf(t -> project.getGradle().getStartParameter().isOffline() == false && gitFetchLatest.get()); + fetchLatest.onlyIf(t -> project.getGradle().getStartParameter().isOffline() == false && gitFetchLatest.get() != null); fetchLatest.dependsOn(addRemoteTaskProvider); fetchLatest.setWorkingDir(gitExtension.getCheckoutDir().get()); fetchLatest.setCommandLine(asList("git", "fetch", "--all")); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java index e0acdd11a6f76..8adfbff424278 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java @@ -39,6 +39,7 @@ import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; +import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.plugins.BasePlugin; import org.gradle.api.logging.Logger; @@ -46,6 +47,7 @@ import org.gradle.api.tasks.AbstractCopyTask; import org.gradle.api.tasks.Sync; import org.gradle.api.tasks.TaskContainer; +import org.gradle.api.tasks.TaskProvider; import org.gradle.api.tasks.bundling.AbstractArchiveTask; import org.gradle.api.tasks.bundling.Compression; import org.gradle.api.tasks.bundling.Zip; @@ -105,14 +107,18 @@ private Action configure(String name) { private void registerAndConfigureDistributionArchivesExtension(Project project) { container = project.container(DistributionArchive.class, name -> { - var subProjectDir = archiveToSubprojectName(name); - var copyDistributionTaskName = "build" + capitalize(name.substring(0, name.length() - 3)); + String subProjectDir = archiveToSubprojectName(name); + String copyDistributionTaskName = "build" + capitalize(name.substring(0, name.length() - 3)); TaskContainer tasks = project.getTasks(); - var explodedDist = tasks.register(copyDistributionTaskName, Sync.class, sync -> sync.into(subProjectDir + "/build/install/")); + TaskProvider explodedDist = 
tasks.register( + copyDistributionTaskName, + Sync.class, + sync -> sync.into(subProjectDir + "/build/install/") + ); explodedDist.configure(configure(name)); - var archiveTaskName = "build" + capitalize(name); + String archiveTaskName = "build" + capitalize(name); - var archiveTask = name.endsWith("Tar") + TaskProvider archiveTask = name.endsWith("Tar") ? tasks.register(archiveTaskName, SymbolicLinkPreservingTar.class) : tasks.register(archiveTaskName, Zip.class); archiveTask.configure(configure(name)); @@ -122,11 +128,11 @@ private void registerAndConfigureDistributionArchivesExtension(Project project) // Each defined distribution archive is linked to a subproject. // A distribution archive definition not matching a sub project will result in build failure. container.whenObjectAdded(distributionArchive -> { - var subProjectName = archiveToSubprojectName(distributionArchive.getName()); + String subProjectName = archiveToSubprojectName(distributionArchive.getName()); project.project(subProjectName, sub -> { sub.getPlugins().apply(BasePlugin.class); sub.getArtifacts().add(DEFAULT_CONFIGURATION_NAME, distributionArchive.getArchiveTask()); - var extractedConfiguration = sub.getConfigurations().create("extracted"); + Configuration extractedConfiguration = sub.getConfigurations().create("extracted"); extractedConfiguration.setCanBeResolved(false); extractedConfiguration.getAttributes().attribute(ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); sub.getArtifacts().add(EXTRACTED_CONFIGURATION_NAME, distributionArchive.getExpandedDistTask()); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenApisPrecommitPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenApisPrecommitPlugin.java index 684710a4c23ba..328edda8b1787 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenApisPrecommitPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenApisPrecommitPlugin.java @@ -48,8 +48,9 @@ import java.nio.file.Path; import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; import java.util.List; -import java.util.Set; public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin { @Override @@ -90,14 +91,14 @@ public TaskProvider createTask(Project project) { // TODO: forbidden apis does not yet support java 15, rethink using runtime version t.setTargetCompatibility(JavaVersion.VERSION_14.getMajorVersion()); } - t.setBundledSignatures(Set.of("jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out")); + t.setBundledSignatures(new HashSet<>(Arrays.asList("jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out"))); t.setSignaturesFiles( project.files( resourcesDir.resolve("forbidden/jdk-signatures.txt"), resourcesDir.resolve("forbidden/opensearch-all-signatures.txt") ) ); - t.setSuppressAnnotations(Set.of("**.SuppressForbidden")); + t.setSuppressAnnotations(new HashSet<>(Arrays.asList("**.SuppressForbidden"))); if (t.getName().endsWith("Test")) { t.setSignaturesFiles( t.getSignaturesFiles() diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ValidateJsonNoKeywordsTask.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ValidateJsonNoKeywordsTask.java index 96e98ae9d980c..b3ac804566e29 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ValidateJsonNoKeywordsTask.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ValidateJsonNoKeywordsTask.java @@ -51,6 +51,7 @@ import java.io.FileNotFoundException; 
import java.io.IOException; import java.io.PrintWriter; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; @@ -126,14 +127,17 @@ public void validate(InputChanges inputChanges) { final JsonNode jsonNode = mapper.readTree(file); if (jsonNode.isObject() == false) { - errors.put(file, Set.of("Expected an object, but found: " + jsonNode.getNodeType())); + errors.put(file, new HashSet<>(Arrays.asList("Expected an object, but found: " + jsonNode.getNodeType()))); return; } final ObjectNode rootNode = (ObjectNode) jsonNode; if (rootNode.size() != 1) { - errors.put(file, Set.of("Expected an object with exactly 1 key, but found " + rootNode.size() + " keys")); + errors.put( + file, + new HashSet<>(Arrays.asList("Expected an object with exactly 1 key, but found " + rootNode.size() + " keys")) + ); return; } @@ -148,7 +152,7 @@ public void validate(InputChanges inputChanges) { } } } catch (IOException e) { - errors.put(file, Set.of("Failed to load file: " + e.getMessage())); + errors.put(file, new HashSet<>(Arrays.asList("Failed to load file: " + e.getMessage()))); } }); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java index 68fab04c2217f..a77155aacf723 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java @@ -70,6 +70,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Map.Entry; import java.util.function.Supplier; import java.util.stream.Stream; @@ -204,7 +205,7 @@ public void apply(Project project) { vmDependencies ); } else { - for (var entry : linuxTestTasks.entrySet()) { + for (Entry>> entry : linuxTestTasks.entrySet()) { OpenSearchDistribution.Type type = entry.getKey(); TaskProvider vmLifecycleTask = vmLifecyleTasks.get(type); configureVMWrapperTasks(vmProject, entry.getValue(), depsTasks, wrapperTask -> { @@ -227,7 +228,7 @@ public void apply(Project project) { }, vmDependencies); } - for (var entry : upgradeTestTasks.entrySet()) { + for (Entry>> entry : upgradeTestTasks.entrySet()) { String version = entry.getKey(); TaskProvider vmVersionTask = vmVersionTasks.get(version); configureVMWrapperTasks( @@ -321,7 +322,12 @@ private static Object convertPath( private static Configuration configureExamplePlugin(Project project) { Configuration examplePlugin = project.getConfigurations().create(EXAMPLE_PLUGIN_CONFIGURATION); DependencyHandler deps = project.getDependencies(); - Map examplePluginProject = Map.of("path", ":example-plugins:custom-settings", "configuration", "zip"); + Map examplePluginProject = new HashMap() { + { + put("path", ":example-plugins:custom-settings"); + put("configuration", "zip"); + } + }; deps.add(EXAMPLE_PLUGIN_CONFIGURATION, deps.project(examplePluginProject)); return examplePlugin; } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java index 5e8194556e98f..399cd39d236d7 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java @@ -164,7 +164,7 @@ void copy() { getFileSystemOperations().copy(c -> { c.from(getArchiveOperations().zipTree(coreConfig.getSingleFile())); // this ends up as the same dir as outputDir - 
c.into(Objects.requireNonNull(getSourceSet().orElseThrow().getOutput().getResourcesDir())); + c.into(Objects.requireNonNull(getSourceSet().get().getOutput().getResourcesDir())); if (includeCore.get().isEmpty()) { c.include(REST_API_PREFIX + "/**"); } else { diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java index 8204aea1ae8ba..56ce449f4cf6f 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java @@ -155,7 +155,7 @@ void copy() { getFileSystemOperations().copy(c -> { c.from(getArchiveOperations().zipTree(coreConfig.getSingleFile())); // this ends up as the same dir as outputDir - c.into(Objects.requireNonNull(getSourceSet().orElseThrow().getOutput().getResourcesDir())); + c.into(Objects.requireNonNull(getSourceSet().get().getOutput().getResourcesDir())); c.include( includeCore.get().stream().map(prefix -> REST_TEST_PREFIX + "/" + prefix + "*/**").collect(Collectors.toList()) ); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java index 1b0b6953d1507..728e36ce98bff 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java @@ -41,7 +41,7 @@ import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.SourceSetContainer; -import java.util.Map; +import java.util.HashMap; /** *

@@ -88,8 +88,12 @@ public void apply(Project project) { task.sourceSetName = SourceSet.TEST_SOURCE_SET_NAME; if (BuildParams.isInternal()) { // core - Dependency restTestdependency = project.getDependencies() - .project(Map.of("path", ":rest-api-spec", "configuration", "restTests")); + Dependency restTestdependency = project.getDependencies().project(new HashMap() { + { + put("path", ":rest-api-spec"); + put("configuration", "restTests"); + } + }); project.getDependencies().add(task.coreConfig.getName(), restTestdependency); } else { Dependency dependency = project.getDependencies() @@ -109,8 +113,12 @@ public void apply(Project project) { task.coreConfig = specConfig; task.sourceSetName = SourceSet.TEST_SOURCE_SET_NAME; if (BuildParams.isInternal()) { - Dependency restSpecDependency = project.getDependencies() - .project(Map.of("path", ":rest-api-spec", "configuration", "restSpecs")); + Dependency restSpecDependency = project.getDependencies().project(new HashMap() { + { + put("path", ":rest-api-spec"); + put("configuration", "restSpecs"); + } + }); project.getDependencies().add(task.coreConfig.getName(), restSpecDependency); } else { Dependency dependency = project.getDependencies() diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java index 9e6984fd45007..a94ebacd460a5 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java @@ -404,11 +404,6 @@ public void upgradeAllNodesAndPluginsToNextVersion(List> p writeUnicastHostsFiles(); } - public void fullRestart() { - stop(false); - start(); - } - public void nextNodeToNextVersion() { OpenSearchNode node = upgradeNodeToNextVersion(); node.start(); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java index a99b118f7176d..b051c15e81d6d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java @@ -385,8 +385,12 @@ Collection getPluginAndModuleConfigurations() { private Provider maybeCreatePluginOrModuleDependency(String path) { Configuration configuration = pluginAndModuleConfigurations.computeIfAbsent( path, - key -> project.getConfigurations() - .detachedConfiguration(project.getDependencies().project(Map.of("path", path, "configuration", "zip"))) + key -> project.getConfigurations().detachedConfiguration(project.getDependencies().project(new HashMap() { + { + put("path", path); + put("configuration", "zip"); + } + })) ); Provider fileProvider = configuration.getElements() .map( @@ -679,10 +683,6 @@ void goToNextVersion() { setting("node.attr.upgraded", "true"); } - private boolean isSettingTrue(String name) { - return Boolean.valueOf(settings.getOrDefault(name, "false").toString()); - } - private void copyExtraConfigFiles() { if (extraConfigFiles.isEmpty() == false) { logToProcessStdout("Setting up " + extraConfigFiles.size() + " additional config files"); @@ -977,7 +977,7 @@ public synchronized void stop(boolean tailLogs) { LOGGER.info("Stopping `{}`, tailLogs: {}", this, tailLogs); requireNonNull(opensearchProcess, "Can't stop `" + this + "` as it was not started or already stopped."); // Test clusters are not reused, don't spend time on a graceful shutdown - 
stopHandle(opensearchProcess.toHandle(), true); + stopProcess(opensearchProcess.toHandle(), true); reaper.unregister(toString()); if (tailLogs) { logFileContents("Standard output of node", currentConfig.stdoutFile); @@ -1002,7 +1002,7 @@ public void setNameCustomization(Function nameCustomizer) { this.nameCustomization = nameCustomizer; } - private void stopHandle(ProcessHandle processHandle, boolean forcibly) { + private void stopProcess(ProcessHandle processHandle, boolean forcibly) { // No-op if the process has already exited by itself. if (processHandle.isAlive() == false) { LOGGER.info("Process was not running when we tried to terminate it."); @@ -1041,7 +1041,12 @@ private void stopHandle(ProcessHandle processHandle, boolean forcibly) { throw new TestClustersException("Was not able to terminate " + currentConfig.command + " process for " + this); } } finally { - children.forEach(each -> stopHandle(each, forcibly)); + children.forEach(each -> stopProcess(each, forcibly)); + } + + waitForProcessToExit(processHandle); + if (processHandle.isAlive()) { + throw new TestClustersException("Was not able to terminate " + currentConfig.command + " process for " + this); } } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClusterConfiguration.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClusterConfiguration.java index a46e6ca876b35..b27f205291269 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClusterConfiguration.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClusterConfiguration.java @@ -172,11 +172,9 @@ default void waitForConditions( } else { String extraCause = ""; Throwable cause = lastException; - int ident = 2; while (cause != null) { if (cause.getMessage() != null && cause.getMessage().isEmpty() == false) { extraCause += "\n" + " " + cause.getMessage(); - ident += 2; } cause = cause.getCause(); } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java index 1a55052f53004..ae1db26fbc48d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java @@ -64,9 +64,9 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.nio.file.Files; +import java.util.Arrays; import java.util.Collections; import java.util.function.BiConsumer; -import java.util.List; import java.util.Optional; public class TestFixturesPlugin implements Plugin { @@ -162,7 +162,7 @@ public void execute(Task task) { final Integer timeout = ext.has("dockerComposeHttpTimeout") ? 
(Integer) ext.get("dockerComposeHttpTimeout") : 120; composeExtension.getEnvironment().put("COMPOSE_HTTP_TIMEOUT", timeout); - Optional dockerCompose = List.of(DOCKER_COMPOSE_BINARIES) + Optional dockerCompose = Arrays.asList(DOCKER_COMPOSE_BINARIES) .stream() .filter(path -> project.file(path).exists()) .findFirst(); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java b/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java index 162121db97797..5ff8168a9bed2 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java @@ -94,6 +94,5 @@ public void unpack(File tarFile, File targetDir) throws IOException { entry = tar.getNextTarEntry(); } } - } } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java b/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java index 01622e6df3ee1..054f01788d126 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java @@ -55,6 +55,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -168,7 +169,11 @@ public static void setupIdeForTestSourceSet(Project project, SourceSet testSourc project.getPluginManager().withPlugin("idea", p -> { IdeaModel idea = project.getExtensions().getByType(IdeaModel.class); idea.getModule().setTestSourceDirs(testSourceSet.getJava().getSrcDirs()); - idea.getModule().getScopes().put(testSourceSet.getName(), Map.of("plus", List.of(runtimeClasspathConfiguration))); + idea.getModule().getScopes().put(testSourceSet.getName(), new HashMap>() { + { + put("plus", Arrays.asList(runtimeClasspathConfiguration)); + } + }); }); project.getPluginManager().withPlugin("eclipse", p -> { EclipseModel eclipse = project.getExtensions().getByType(EclipseModel.class); diff --git a/buildSrc/src/main/resources/minimumRuntimeVersion b/buildSrc/src/main/resources/minimumRuntimeVersion index 468437494697b..9d607966b721a 100644 --- a/buildSrc/src/main/resources/minimumRuntimeVersion +++ b/buildSrc/src/main/resources/minimumRuntimeVersion @@ -1 +1 @@ -1.8 \ No newline at end of file +11 \ No newline at end of file diff --git a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java index d6299311e1d9b..98feb3ef2ac93 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java @@ -38,7 +38,6 @@ import org.opensearch.gradle.test.GradleUnitTestCase; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Project; -import org.gradle.api.internal.artifacts.repositories.DefaultIvyArtifactRepository; import org.gradle.testfixtures.ProjectBuilder; import java.io.File; @@ -80,58 +79,6 @@ public void testVersionDefault() { assertEquals(distro.getVersion(), VersionProperties.getOpenSearch()); } - public void testCustomDistributionUrlWithUrl() { - Project project = createProject(null, false); - String customUrl = "https://artifacts.opensearch.org/custom"; - project.getExtensions().getExtraProperties().set("customDistributionUrl", customUrl); - 
DistributionDownloadPlugin plugin = project.getPlugins().getPlugin(DistributionDownloadPlugin.class); - plugin.setupDistributions(project); - assertEquals(4, project.getRepositories().size()); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads")).getUrl().toString(), - customUrl - ); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-snapshots")).getUrl().toString(), - customUrl - ); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-downloads")).getUrl().toString(), - "https://artifacts-no-kpi.elastic.co" - ); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-snapshots")).getUrl().toString(), - "https://snapshots-no-kpi.elastic.co" - ); - } - - public void testCustomDistributionUrlWithoutUrl() { - Project project = createProject(null, false); - DistributionDownloadPlugin plugin = project.getPlugins().getPlugin(DistributionDownloadPlugin.class); - plugin.setupDistributions(project); - assertEquals(5, project.getRepositories().size()); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads")).getUrl().toString(), - "https://artifacts.opensearch.org" - ); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads2")).getUrl().toString(), - "https://artifacts.opensearch.org" - ); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-snapshots")).getUrl().toString(), - "https://artifacts.opensearch.org" - ); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-downloads")).getUrl().toString(), - "https://artifacts-no-kpi.elastic.co" - ); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-snapshots")).getUrl().toString(), - "https://snapshots-no-kpi.elastic.co" - ); - } - public void testBadVersionFormat() { assertDistroError( createProject(null, false), diff --git a/buildSrc/src/test/java/org/opensearch/gradle/docker/DockerSupportServiceTests.java b/buildSrc/src/test/java/org/opensearch/gradle/docker/DockerSupportServiceTests.java index e1891acc74dd5..e2acf3c087ecc 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/docker/DockerSupportServiceTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/docker/DockerSupportServiceTests.java @@ -33,6 +33,7 @@ import org.opensearch.gradle.test.GradleIntegrationTestCase; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -44,7 +45,7 @@ public class DockerSupportServiceTests extends GradleIntegrationTestCase { public void testParseOsReleaseOnOracle() { - final List lines = List.of( + final List lines = Arrays.asList( "NAME=\"Oracle Linux Server\"", "VERSION=\"6.10\"", "ID=\"ol\"", @@ -85,11 +86,15 @@ public void testParseOsReleaseOnOracle() { * Trailing whitespace should be removed */ public void testRemoveTrailingWhitespace() { - final List lines = List.of("NAME=\"Oracle Linux Server\" "); + final List lines = Arrays.asList("NAME=\"Oracle Linux Server\" "); final Map results = parseOsRelease(lines); - final Map expected = Map.of("NAME", "oracle linux server"); + final Map expected = new HashMap() { + { + put("NAME", "oracle linux server"); + } + }; assertThat(expected, equalTo(results)); } @@ -98,11 +103,15 @@ public void testRemoveTrailingWhitespace() { * Comments should be removed */ 
public void testRemoveComments() { - final List lines = List.of("# A comment", "NAME=\"Oracle Linux Server\""); + final List lines = Arrays.asList("# A comment", "NAME=\"Oracle Linux Server\""); final Map results = parseOsRelease(lines); - final Map expected = Map.of("NAME", "oracle linux server"); + final Map expected = new HashMap() { + { + put("NAME", "oracle linux server"); + } + }; assertThat(expected, equalTo(results)); } diff --git a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle index 57010739dfc44..c8c89fb5e4273 100644 --- a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle +++ b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle @@ -16,7 +16,7 @@ repositories { mavenCentral() } dependencies { - implementation 'org.apache.logging.log4j:log4j-core:2.17.1' + implementation 'org.apache.logging.log4j:log4j-core:2.17.2' } ["0.0.1", "0.0.2"].forEach { v -> diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/bulk/RestNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/bulk/RestNoopBulkAction.java index 62123870f0099..1e94939bed7b5 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/bulk/RestNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/bulk/RestNoopBulkAction.java @@ -67,9 +67,7 @@ public List routes() { new Route(POST, "/_noop_bulk"), new Route(PUT, "/_noop_bulk"), new Route(POST, "/{index}/_noop_bulk"), - new Route(PUT, "/{index}/_noop_bulk"), - new Route(POST, "/{index}/{type}/_noop_bulk"), - new Route(PUT, "/{index}/{type}/_noop_bulk") + new Route(PUT, "/{index}/_noop_bulk") ) ); } @@ -83,7 +81,6 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { BulkRequest bulkRequest = Requests.bulkRequest(); String defaultIndex = request.param("index"); - String defaultType = request.param("type"); String defaultRouting = request.param("routing"); String defaultPipeline = request.param("pipeline"); Boolean defaultRequireAlias = request.paramAsBoolean("require_alias", null); diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/search/RestNoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/search/RestNoopSearchAction.java index 3f7be0446532e..a6573cd488e4b 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/search/RestNoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/search/RestNoopSearchAction.java @@ -53,9 +53,7 @@ public List routes() { new Route(GET, "/_noop_search"), new Route(POST, "/_noop_search"), new Route(GET, "/{index}/_noop_search"), - new Route(POST, "/{index}/_noop_search"), - new Route(GET, "/{index}/{type}/_noop_search"), - new Route(POST, "/{index}/{type}/_noop_search") + new Route(POST, "/{index}/_noop_search") ) ); } diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 4144186ba5f70..07147ce81b72e 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -92,7 +92,7 @@ check.dependsOn(asyncIntegTest) testClusters.all { testDistribution = 'ARCHIVE' systemProperty 
'opensearch.scripting.update.ctx_in_params', 'false' - setting 'reindex.remote.whitelist', '[ "[::1]:*", "127.0.0.1:*" ]' + setting 'reindex.remote.allowlist', '[ "[::1]:*", "127.0.0.1:*" ]' extraConfigFile 'roles.yml', file('roles.yml') user username: System.getProperty('tests.rest.cluster.username', 'test_user'), diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java index 00b07fc4881bd..9b4586ec6bf89 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java @@ -361,60 +361,6 @@ public Cancellable dataStreamsStatsAsync( ); } - /** - * Creates an index using the Create Index API. - * - * @param createIndexRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The - * method {@link #create(CreateIndexRequest, RequestOptions)} should be used instead, which accepts a new - * request object. - */ - @Deprecated - public org.opensearch.action.admin.indices.create.CreateIndexResponse create( - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - createIndexRequest, - IndicesRequestConverters::createIndex, - options, - org.opensearch.action.admin.indices.create.CreateIndexResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously creates an index using the Create Index API. - * - * @param createIndexRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The - * method {@link #createAsync(CreateIndexRequest, RequestOptions, ActionListener)} should be used instead, - * which accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable createAsync( - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - createIndexRequest, - IndicesRequestConverters::createIndex, - options, - org.opensearch.action.admin.indices.create.CreateIndexResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Updates the mappings on an index using the Put Mapping API. * @@ -456,59 +402,6 @@ public Cancellable putMappingAsync( ); } - /** - * Updates the mappings on an index using the Put Mapping API. - * - * @param putMappingRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. 
The method - * {@link #putMapping(PutMappingRequest, RequestOptions)} should be used instead, which accepts a new request object. - */ - @Deprecated - public AcknowledgedResponse putMapping( - org.opensearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - putMappingRequest, - IndicesRequestConverters::putMapping, - options, - AcknowledgedResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously updates the mappings on an index using the Put Mapping API. - * - * @param putMappingRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The - * method {@link #putMappingAsync(PutMappingRequest, RequestOptions, ActionListener)} should be used instead, - * which accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable putMappingAsync( - org.opensearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - putMappingRequest, - IndicesRequestConverters::putMapping, - options, - AcknowledgedResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Retrieves the mappings on an index or indices using the Get Mapping API. * @@ -550,114 +443,6 @@ public Cancellable getMappingAsync( ); } - /** - * Retrieves the mappings on an index or indices using the Get Mapping API. - * - * @param getMappingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses old request and response objects which still refer to types, a deprecated - * feature. The method {@link #getMapping(GetMappingsRequest, RequestOptions)} should be used instead, which - * accepts a new request object. - */ - @Deprecated - public org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse getMapping( - org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest getMappingsRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getMappingsRequest, - IndicesRequestConverters::getMappings, - options, - org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously retrieves the mappings on an index on indices using the Get Mapping API. - * - * @param getMappingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses old request and response objects which still refer to types, a deprecated feature. - * The method {@link #getMapping(GetMappingsRequest, RequestOptions)} should be used instead, which accepts a new - * request object. 
- * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable getMappingAsync( - org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest getMappingsRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getMappingsRequest, - IndicesRequestConverters::getMappings, - options, - org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Retrieves the field mappings on an index or indices using the Get Field Mapping API. - * - * @param getFieldMappingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses old request and response objects which still refer to types, a deprecated feature. - * The method {@link #getFieldMapping(GetFieldMappingsRequest, RequestOptions)} should be used instead, which - * accepts a new request object. - */ - @Deprecated - public org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse getFieldMapping( - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getFieldMappingsRequest, - IndicesRequestConverters::getFieldMapping, - options, - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously retrieves the field mappings on an index on indices using the Get Field Mapping API. - * - * @param getFieldMappingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses old request and response objects which still refer to types, a deprecated feature. - * The method {@link #getFieldMappingAsync(GetFieldMappingsRequest, RequestOptions, ActionListener)} should be - * used instead, which accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable getFieldMappingAsync( - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getFieldMappingsRequest, - IndicesRequestConverters::getFieldMapping, - options, - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Retrieves the field mappings on an index or indices using the Get Field Mapping API. * @@ -1008,56 +793,6 @@ public Cancellable getAsync(GetIndexRequest getIndexRequest, RequestOptions opti ); } - /** - * Retrieve information about one or more indexes - * - * @param getIndexRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method - * {@link #get(GetIndexRequest, RequestOptions)} should be used instead, which accepts a new request object. - */ - @Deprecated - public org.opensearch.action.admin.indices.get.GetIndexResponse get( - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getIndexRequest, - IndicesRequestConverters::getIndex, - options, - org.opensearch.action.admin.indices.get.GetIndexResponse::fromXContent, - emptySet() - ); - } - - /** - * Retrieve information about one or more indexes - * - * @param getIndexRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method - * {@link #getAsync(GetIndexRequest, RequestOptions, ActionListener)} should be used instead, which accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable getAsync( - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getIndexRequest, - IndicesRequestConverters::getIndex, - options, - org.opensearch.action.admin.indices.get.GetIndexResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Force merge one or more indices using the Force Merge API. * @@ -1210,53 +945,6 @@ public Cancellable existsAsync(GetIndexRequest request, RequestOptions options, ); } - /** - * Checks if the index (indices) exists or not. - * - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method - * {@link #exists(GetIndexRequest, RequestOptions)} should be used instead, which accepts a new request object. - */ - @Deprecated - public boolean exists(org.opensearch.action.admin.indices.get.GetIndexRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequest( - request, - IndicesRequestConverters::indicesExist, - options, - RestHighLevelClient::convertExistsResponse, - Collections.emptySet() - ); - } - - /** - * Asynchronously checks if the index (indices) exists or not. - * - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method - * {@link #existsAsync(GetIndexRequest, RequestOptions, ActionListener)} should be used instead, which accepts a new request object. 
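The removed typed exists/get variants point at exists(GetIndexRequest, RequestOptions) and get(GetIndexRequest, RequestOptions) as their replacements. A rough usage sketch follows; the constructor taking index names and the accessor names on the response are assumptions about the client-side GetIndexRequest/GetIndexResponse, offered only for orientation.

```
import java.io.IOException;

import org.apache.http.HttpHost;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.client.indices.GetIndexRequest;
import org.opensearch.client.indices.GetIndexResponse;

public class TypelessGetIndexExample {
    public static void main(String[] args) throws IOException {
        try (RestHighLevelClient client = new RestHighLevelClient(
            RestClient.builder(new HttpHost("localhost", 9200, "http")))) {

            GetIndexRequest request = new GetIndexRequest("my-index");

            // HEAD /my-index : no include_type_name parameter is sent any more.
            boolean exists = client.indices().exists(request, RequestOptions.DEFAULT);

            if (exists) {
                // GET /my-index : mappings are keyed by index, with no type level.
                GetIndexResponse response = client.indices().get(request, RequestOptions.DEFAULT);
                System.out.println(response.getMappings().get("my-index").sourceAsMap());
                System.out.println(response.getSetting("my-index", "index.number_of_shards"));
            }
        }
    }
}
```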
- * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable existsAsync( - org.opensearch.action.admin.indices.get.GetIndexRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsync( - request, - IndicesRequestConverters::indicesExist, - options, - RestHighLevelClient::convertExistsResponse, - listener, - Collections.emptySet() - ); - } - /** * Shrinks an index using the Shrink Index API. * @@ -1549,59 +1237,6 @@ public Cancellable rolloverAsync(RolloverRequest rolloverRequest, RequestOptions ); } - /** - * Rolls over an index using the Rollover Index API. - * - * @param rolloverRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses deprecated request and response objects. - * The method {@link #rollover(RolloverRequest, RequestOptions)} should be used instead, which accepts a new request object. - */ - @Deprecated - public org.opensearch.action.admin.indices.rollover.RolloverResponse rollover( - org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - rolloverRequest, - IndicesRequestConverters::rollover, - options, - org.opensearch.action.admin.indices.rollover.RolloverResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously rolls over an index using the Rollover Index API. - * - * @param rolloverRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses deprecated request and response objects. - * The method {@link #rolloverAsync(RolloverRequest, RequestOptions, ActionListener)} should be used instead, which - * accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable rolloverAsync( - org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - rolloverRequest, - IndicesRequestConverters::rollover, - options, - org.opensearch.action.admin.indices.rollover.RolloverResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Gets one or more aliases using the Get Index Aliases API. * @@ -1684,57 +1319,6 @@ public Cancellable putSettingsAsync( ); } - /** - * Puts an index template using the Index Templates API. - * - * @param putIndexTemplateRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * @deprecated This old form of request allows types in mappings. Use {@link #putTemplate(PutIndexTemplateRequest, RequestOptions)} - * instead which introduces a new request object without types. 
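For the rollover removal above, the remaining typeless entry point is rollover(RolloverRequest, RequestOptions). The sketch below shows approximately how the client-side request is built; the org.opensearch.client.indices.rollover package location and the condition/mapping builder calls are assumptions, not details confirmed by this diff.

```
import java.io.IOException;

import org.apache.http.HttpHost;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.client.indices.rollover.RolloverRequest;
import org.opensearch.client.indices.rollover.RolloverResponse;
import org.opensearch.common.xcontent.XContentType;

public class TypelessRolloverExample {
    public static void main(String[] args) throws IOException {
        try (RestHighLevelClient client = new RestHighLevelClient(
            RestClient.builder(new HttpHost("localhost", 9200, "http")))) {

            // Roll "alias" over to "test_new" once the current write index holds at least one document.
            RolloverRequest request = new RolloverRequest("alias", "test_new");
            request.addMaxIndexDocsCondition(1);
            // Mappings for the new index are typeless: just a JSON "properties" block.
            request.getCreateIndexRequest()
                .mapping("{\"properties\":{\"field2\":{\"type\":\"keyword\"}}}", XContentType.JSON);

            RolloverResponse response = client.indices().rollover(request, RequestOptions.DEFAULT);
            System.out.println(response.getOldIndex() + " -> " + response.getNewIndex()
                + " rolledOver=" + response.isRolledOver());
        }
    }
}
```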
- */ - @Deprecated - public AcknowledgedResponse putTemplate( - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putIndexTemplateRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - putIndexTemplateRequest, - IndicesRequestConverters::putTemplate, - options, - AcknowledgedResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously puts an index template using the Index Templates API. - * - * @param putIndexTemplateRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @deprecated This old form of request allows types in mappings. - * Use {@link #putTemplateAsync(PutIndexTemplateRequest, RequestOptions, ActionListener)} - * instead which introduces a new request object without types. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable putTemplateAsync( - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putIndexTemplateRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - putIndexTemplateRequest, - IndicesRequestConverters::putTemplate, - options, - AcknowledgedResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Puts an index template using the Index Templates API. * @@ -1906,31 +1490,6 @@ public Cancellable validateQueryAsync( ); } - /** - * Gets index templates using the Index Templates API. The mappings will be returned in a legacy deprecated format, where the - * mapping definition is nested under the type name. - * - * @param getIndexTemplatesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * @deprecated This method uses an old response object which still refers to types, a deprecated feature. Use - * {@link #getIndexTemplate(GetIndexTemplatesRequest, RequestOptions)} instead which returns a new response object - */ - @Deprecated - public org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getTemplate( - GetIndexTemplatesRequest getIndexTemplatesRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getIndexTemplatesRequest, - IndicesRequestConverters::getTemplatesWithDocumentTypes, - options, - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse::fromXContent, - emptySet() - ); - } - /** * Gets index templates using the Index Templates API * @@ -1994,33 +1553,6 @@ public GetIndexTemplatesResponse getIndexTemplate(GetIndexTemplatesRequest getIn ); } - /** - * Asynchronously gets index templates using the Index Templates API. The mappings will be returned in a legacy deprecated format, - * where the mapping definition is nested under the type name. - * - * @param getIndexTemplatesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @deprecated This method uses an old response object which still refers to types, a deprecated feature. 
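The removed putTemplate/getTemplate variants defer to putTemplate(PutIndexTemplateRequest, RequestOptions) and getIndexTemplate(GetIndexTemplatesRequest, RequestOptions). A combined put-then-read sketch is shown below; the builder calls, package locations, and IndexTemplateMetadata accessors are assumptions about the typeless client classes rather than anything this hunk defines.

```
import java.io.IOException;
import java.util.Arrays;

import org.apache.http.HttpHost;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.client.indices.GetIndexTemplatesRequest;
import org.opensearch.client.indices.GetIndexTemplatesResponse;
import org.opensearch.client.indices.IndexTemplateMetadata;
import org.opensearch.client.indices.PutIndexTemplateRequest;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentType;

public class TypelessTemplateExample {
    public static void main(String[] args) throws IOException {
        try (RestHighLevelClient client = new RestHighLevelClient(
            RestClient.builder(new HttpHost("localhost", 9200, "http")))) {

            // The typeless request takes its name in the constructor and a mapping without a type wrapper.
            PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("my-template")
                .patterns(Arrays.asList("pattern-1", "name-*"))
                .settings(Settings.builder().put("index.number_of_shards", 3))
                .mapping("{\"properties\":{\"host_name\":{\"type\":\"keyword\"}}}", XContentType.JSON);
            boolean acked = client.indices().putTemplate(putRequest, RequestOptions.DEFAULT).isAcknowledged();

            // Read it back through the typeless response objects.
            GetIndexTemplatesResponse getResponse = client.indices()
                .getIndexTemplate(new GetIndexTemplatesRequest("my-template"), RequestOptions.DEFAULT);
            for (IndexTemplateMetadata template : getResponse.getIndexTemplates()) {
                System.out.println(acked + " " + template.name() + " " + template.patterns());
            }
        }
    }
}
```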
Use - * {@link #getIndexTemplateAsync(GetIndexTemplatesRequest, RequestOptions, ActionListener)} instead which returns a new response object - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable getTemplateAsync( - GetIndexTemplatesRequest getIndexTemplatesRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getIndexTemplatesRequest, - IndicesRequestConverters::getTemplatesWithDocumentTypes, - options, - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Asynchronously gets index templates using the Index Templates API * diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java index 9979d18635d05..c50ea58982e4e 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java @@ -78,8 +78,6 @@ import java.io.IOException; import java.util.Locale; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - final class IndicesRequestConverters { private IndicesRequestConverters() {} @@ -165,20 +163,6 @@ static Request createIndex(CreateIndexRequest createIndexRequest) throws IOExcep return request; } - static Request createIndex(org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest) throws IOException { - String endpoint = RequestConverters.endpoint(createIndexRequest.indices()); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withTimeout(createIndexRequest.timeout()); - parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout()); - parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards()); - parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(parameters.asMap()); - request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); - return request; - } - static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException { Request request = new Request(HttpPost.METHOD_NAME, "/_aliases"); @@ -202,31 +186,6 @@ static Request putMapping(PutMappingRequest putMappingRequest) throws IOExceptio return request; } - /** - * converter for the legacy server-side {@link org.opensearch.action.admin.indices.mapping.put.PutMappingRequest} that still supports - * types - */ - @Deprecated - static Request putMapping(org.opensearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest) throws IOException { - // The concreteIndex is an internal concept, not applicable to requests made over the REST API. 
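With the typed createIndex and putMapping converters removed, index creation goes through the client-side CreateIndexRequest only, which serializes to a plain PUT /{index} with no include_type_name parameter. The sketch below illustrates that path; the index name, settings, alias, and the mapping(String, XContentType) call are illustrative assumptions rather than part of this change.

```
import java.io.IOException;

import org.apache.http.HttpHost;
import org.opensearch.action.admin.indices.alias.Alias;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.client.indices.CreateIndexRequest;
import org.opensearch.client.indices.CreateIndexResponse;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentType;

public class TypelessCreateIndexExample {
    public static void main(String[] args) throws IOException {
        try (RestHighLevelClient client = new RestHighLevelClient(
            RestClient.builder(new HttpHost("localhost", 9200, "http")))) {

            // Builds a plain PUT /rich_index request; no type name and no include_type_name parameter.
            CreateIndexRequest request = new CreateIndexRequest("rich_index")
                .settings(Settings.builder().put("index.number_of_replicas", 2))
                .mapping("{\"properties\":{\"field\":{\"type\":\"text\"}}}", XContentType.JSON)
                .alias(new Alias("alias_name").filter("{\"term\":{\"year\":2016}}").routing("1"));

            CreateIndexResponse response = client.indices().create(request, RequestOptions.DEFAULT);
            System.out.println("acknowledged=" + response.isAcknowledged());
        }
    }
}
```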
- if (putMappingRequest.getConcreteIndex() != null) { - throw new IllegalArgumentException("concreteIndex cannot be set on PutMapping requests made over the REST API"); - } - - Request request = new Request( - HttpPut.METHOD_NAME, - RequestConverters.endpoint(putMappingRequest.indices(), "_mapping", putMappingRequest.type()) - ); - - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withTimeout(putMappingRequest.timeout()); - parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout()); - parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(parameters.asMap()); - request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); - return request; - } - static Request getMappings(GetMappingsRequest getMappingsRequest) { String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices(); @@ -240,22 +199,6 @@ static Request getMappings(GetMappingsRequest getMappingsRequest) { return request; } - @Deprecated - static Request getMappings(org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest getMappingsRequest) { - String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices(); - String[] types = getMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.types(); - - Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping", types)); - - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout()); - parameters.withIndicesOptions(getMappingsRequest.indicesOptions()); - parameters.withLocal(getMappingsRequest.local()); - parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(parameters.asMap()); - return request; - } - static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) { String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices(); String[] fields = getFieldMappingsRequest.fields() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields(); @@ -275,30 +218,6 @@ static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) return request; } - @Deprecated - static Request getFieldMapping(org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest) { - String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices(); - String[] types = getFieldMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.types(); - String[] fields = getFieldMappingsRequest.fields() == null ? 
Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields(); - - String endpoint = new RequestConverters.EndpointBuilder().addCommaSeparatedPathParts(indices) - .addPathPartAsIs("_mapping") - .addCommaSeparatedPathParts(types) - .addPathPartAsIs("field") - .addCommaSeparatedPathParts(fields) - .build(); - - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions()); - parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults()); - parameters.withLocal(getFieldMappingsRequest.local()); - parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(parameters.asMap()); - return request; - } - static Request refresh(RefreshRequest refreshRequest) { String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices(); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_refresh")); @@ -456,27 +375,6 @@ static Request rollover(RolloverRequest rolloverRequest) throws IOException { return request; } - @Deprecated - static Request rollover(org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest) throws IOException { - String endpoint = new RequestConverters.EndpointBuilder().addPathPart(rolloverRequest.getRolloverTarget()) - .addPathPartAsIs("_rollover") - .addPathPart(rolloverRequest.getNewIndexName()) - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - params.withTimeout(rolloverRequest.timeout()); - params.withMasterTimeout(rolloverRequest.masterNodeTimeout()); - params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards()); - if (rolloverRequest.isDryRun()) { - params.putParam("dry_run", Boolean.TRUE.toString()); - } - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); - request.addParameters(params.asMap()); - return request; - } - static Request getSettings(GetSettingsRequest getSettingsRequest) { String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices(); String[] names = getSettingsRequest.names() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.names(); @@ -493,28 +391,6 @@ static Request getSettings(GetSettingsRequest getSettingsRequest) { return request; } - /** - * converter for the legacy server-side {@link org.opensearch.action.admin.indices.get.GetIndexRequest} that - * still supports types - */ - @Deprecated - static Request getIndex(org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest) { - String[] indices = getIndexRequest.indices() == null ? 
Strings.EMPTY_ARRAY : getIndexRequest.indices(); - - String endpoint = RequestConverters.endpoint(indices); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - params.withIndicesOptions(getIndexRequest.indicesOptions()); - params.withLocal(getIndexRequest.local()); - params.withIncludeDefaults(getIndexRequest.includeDefaults()); - params.withHuman(getIndexRequest.humanReadable()); - params.withMasterTimeout(getIndexRequest.masterNodeTimeout()); - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(params.asMap()); - return request; - } - static Request getIndex(GetIndexRequest getIndexRequest) { String[] indices = getIndexRequest.indices() == null ? Strings.EMPTY_ARRAY : getIndexRequest.indices(); @@ -531,28 +407,6 @@ static Request getIndex(GetIndexRequest getIndexRequest) { return request; } - /** - * converter for the legacy server-side {@link org.opensearch.action.admin.indices.get.GetIndexRequest} that - * still supports types - */ - @Deprecated - static Request indicesExist(org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest) { - if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) { - throw new IllegalArgumentException("indices are mandatory"); - } - String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), ""); - Request request = new Request(HttpHead.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - params.withLocal(getIndexRequest.local()); - params.withHuman(getIndexRequest.humanReadable()); - params.withIndicesOptions(getIndexRequest.indicesOptions()); - params.withIncludeDefaults(getIndexRequest.includeDefaults()); - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(params.asMap()); - return request; - } - static Request indicesExist(GetIndexRequest getIndexRequest) { if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) { throw new IllegalArgumentException("indices are mandatory"); @@ -583,31 +437,6 @@ static Request indexPutSettings(UpdateSettingsRequest updateSettingsRequest) thr return request; } - /** - * @deprecated This uses the old form of PutIndexTemplateRequest which uses types. 
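The surviving typeless getIndex and indicesExist converters reduce to a bare GET /{index} and HEAD /{index} with no include_type_name=true attached. The low-level sketch below shows roughly the equivalent wire requests; the low-level RestClient usage and the response handling are assumptions added for illustration and are not part of this change.

```
import java.io.IOException;

import org.apache.http.HttpHost;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
import org.opensearch.client.ResponseException;
import org.opensearch.client.RestClient;

public class TypelessEndpointSketch {
    public static void main(String[] args) throws IOException {
        try (RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {

            // What the typeless indicesExist converter boils down to: HEAD /{index},
            // with no include_type_name=true query parameter attached.
            Request head = new Request("HEAD", "/my-index");
            try {
                Response headResponse = restClient.performRequest(head);
                System.out.println("exists, status=" + headResponse.getStatusLine().getStatusCode());
            } catch (ResponseException e) {
                System.out.println("missing, status=" + e.getResponse().getStatusLine().getStatusCode());
            }

            // And the typeless getIndex converter: GET /{index}, again without the type flag.
            Request get = new Request("GET", "/my-index");
            get.addParameter("include_defaults", "true");
            System.out.println(restClient.performRequest(get).getStatusLine());
        }
    }
}
```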
- * Use (@link {@link #putTemplate(PutIndexTemplateRequest)} instead - */ - @Deprecated - static Request putTemplate(org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putIndexTemplateRequest) - throws IOException { - String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") - .addPathPart(putIndexTemplateRequest.name()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout()); - if (putIndexTemplateRequest.create()) { - params.putParam("create", Boolean.TRUE.toString()); - } - if (Strings.hasText(putIndexTemplateRequest.cause())) { - params.putParam("cause", putIndexTemplateRequest.cause()); - } - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(params.asMap()); - request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); - return request; - } - static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") .addPathPart(putIndexTemplateRequest.name()) @@ -669,8 +498,7 @@ static Request simulateIndexTemplate(SimulateIndexTemplateRequest simulateIndexT static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException { String[] indices = validateQueryRequest.indices() == null ? Strings.EMPTY_ARRAY : validateQueryRequest.indices(); - String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types(); - String endpoint = RequestConverters.endpoint(indices, types, "_validate/query"); + String endpoint = RequestConverters.endpoint(indices, "_validate/query"); Request request = new Request(HttpGet.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withIndicesOptions(validateQueryRequest.indicesOptions()); @@ -694,16 +522,7 @@ static Request getAlias(GetAliasesRequest getAliasesRequest) { return request; } - @Deprecated - static Request getTemplatesWithDocumentTypes(GetIndexTemplatesRequest getIndexTemplatesRequest) { - return getTemplates(getIndexTemplatesRequest, true); - } - static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) { - return getTemplates(getIndexTemplatesRequest, false); - } - - private static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest, boolean includeTypeName) { final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") .addCommaSeparatedPathParts(getIndexTemplatesRequest.names()) .build(); @@ -711,9 +530,6 @@ private static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRe final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(getIndexTemplatesRequest.isLocal()); params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout()); - if (includeTypeName) { - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - } request.addParameters(params.asMap()); return request; } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java index 06ceee78e5566..95188ec0f8e96 100644 --- 
a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java @@ -32,6 +32,7 @@ package org.opensearch.client; import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.ToXContent; @@ -42,6 +43,10 @@ import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; /** * Base class for HLRC response parsing tests. @@ -99,4 +104,16 @@ protected ToXContent.Params getParams() { return ToXContent.EMPTY_PARAMS; } + protected static void assertMapEquals(ImmutableOpenMap expected, Map actual) { + Set expectedKeys = new HashSet<>(); + Iterator keysIt = expected.keysIt(); + while (keysIt.hasNext()) { + expectedKeys.add(keysIt.next()); + } + + assertEquals(expectedKeys, actual.keySet()); + for (String key : expectedKeys) { + assertEquals(expected.get(key), actual.get(key)); + } + } } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java index 69ce518173042..f9c8851f8839e 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java @@ -122,15 +122,9 @@ import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.index.IndexSettings; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.rest.RestStatus; -import org.opensearch.rest.action.admin.indices.RestCreateIndexAction; -import org.opensearch.rest.action.admin.indices.RestGetIndexTemplateAction; -import org.opensearch.rest.action.admin.indices.RestGetIndicesAction; -import org.opensearch.rest.action.admin.indices.RestPutIndexTemplateAction; -import org.opensearch.rest.action.admin.indices.RestRolloverIndexAction; import java.io.IOException; import java.util.Arrays; @@ -198,18 +192,6 @@ public void testIndicesExists() throws IOException { } } - public void testIndicesExistsWithTypes() throws IOException { - // Index present - String indexName = "test_index_exists_index_present"; - createIndex(indexName, Settings.EMPTY); - - org.opensearch.action.admin.indices.get.GetIndexRequest request = new org.opensearch.action.admin.indices.get.GetIndexRequest(); - request.indices(indexName); - - boolean response = execute(request, highLevelClient().indices()::exists, highLevelClient().indices()::existsAsync); - assertTrue(response); - } - @SuppressWarnings({ "unchecked", "rawtypes" }) public void testCreateIndex() throws IOException { { @@ -274,74 +256,6 @@ public void testCreateIndex() throws IOException { } } - @SuppressWarnings({ "unchecked", "rawtypes" }) - public void testCreateIndexWithTypes() throws IOException { - { - // Create index - String indexName = "plain_index"; - assertFalse(indexExists(indexName)); - - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest = - new org.opensearch.action.admin.indices.create.CreateIndexRequest(indexName); - - 
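The assertMapEquals helper added to AbstractResponseTestCase above appears with its generic parameters stripped in this copy of the diff (for example "ImmutableOpenMap expected, Map actual"). A plausible reconstruction is sketched below; the <T> type parameter and the standalone class wrapper are assumptions, while the method body follows the added lines.

```
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import org.opensearch.common.collect.ImmutableOpenMap;

import static org.junit.Assert.assertEquals;

public class MapAssertions {

    // Same body as the helper added above; the generic signature is an assumption,
    // since the angle-bracketed parameters are not visible in this copy of the diff.
    public static <T> void assertMapEquals(ImmutableOpenMap<String, T> expected, Map<String, T> actual) {
        Set<String> expectedKeys = new HashSet<>();
        Iterator<String> keysIt = expected.keysIt();
        while (keysIt.hasNext()) {
            expectedKeys.add(keysIt.next());
        }

        assertEquals(expectedKeys, actual.keySet());
        for (String key : expectedKeys) {
            assertEquals(expected.get(key), actual.get(key));
        }
    }
}
```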
org.opensearch.action.admin.indices.create.CreateIndexResponse createIndexResponse = execute( - createIndexRequest, - highLevelClient().indices()::create, - highLevelClient().indices()::createAsync, - expectWarningsOnce(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE) - ); - assertTrue(createIndexResponse.isAcknowledged()); - - assertTrue(indexExists(indexName)); - } - { - // Create index with mappings, aliases and settings - String indexName = "rich_index"; - assertFalse(indexExists(indexName)); - - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest = - new org.opensearch.action.admin.indices.create.CreateIndexRequest(indexName); - - Alias alias = new Alias("alias_name"); - alias.filter("{\"term\":{\"year\":2016}}"); - alias.routing("1"); - createIndexRequest.alias(alias); - - Settings.Builder settings = Settings.builder(); - settings.put(SETTING_NUMBER_OF_REPLICAS, 2); - createIndexRequest.settings(settings); - - XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); - mappingBuilder.startObject().startObject("properties").startObject("field"); - mappingBuilder.field("type", "text"); - mappingBuilder.endObject().endObject().endObject(); - createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappingBuilder); - - org.opensearch.action.admin.indices.create.CreateIndexResponse createIndexResponse = execute( - createIndexRequest, - highLevelClient().indices()::create, - highLevelClient().indices()::createAsync, - expectWarningsOnce(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE) - ); - assertTrue(createIndexResponse.isAcknowledged()); - - Map getIndexResponse = getAsMap(indexName); - assertEquals("2", XContentMapValues.extractValue(indexName + ".settings.index.number_of_replicas", getIndexResponse)); - - Map aliasData = (Map) XContentMapValues.extractValue( - indexName + ".aliases.alias_name", - getIndexResponse - ); - assertNotNull(aliasData); - assertEquals("1", aliasData.get("index_routing")); - Map filter = (Map) aliasData.get("filter"); - Map term = (Map) filter.get("term"); - assertEquals(2016, term.get("year")); - - assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); - } - } - public void testGetSettings() throws IOException { String indexName = "get_settings_index"; Settings basicSettings = Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build(); @@ -487,33 +401,6 @@ public void testGetIndex() throws IOException { assertEquals("integer", fieldMapping.get("type")); } - @SuppressWarnings("unchecked") - public void testGetIndexWithTypes() throws IOException { - String indexName = "get_index_test"; - Settings basicSettings = Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build(); - String mappings = "\"properties\":{\"field-1\":{\"type\":\"integer\"}}"; - createIndex(indexName, basicSettings, mappings); - - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest = - new org.opensearch.action.admin.indices.get.GetIndexRequest().indices(indexName).includeDefaults(false); - org.opensearch.action.admin.indices.get.GetIndexResponse getIndexResponse = execute( - getIndexRequest, - highLevelClient().indices()::get, - highLevelClient().indices()::getAsync, - expectWarningsOnce(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE) - ); - - // default settings should be null - assertNull(getIndexResponse.getSetting(indexName, "index.refresh_interval")); - assertEquals("1", 
getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_SHARDS)); - assertEquals("0", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_REPLICAS)); - assertNotNull(getIndexResponse.getMappings().get(indexName)); - MappingMetadata mappingMetadata = getIndexResponse.getMappings().get(indexName).get("_doc"); - assertNotNull(mappingMetadata); - assertEquals("_doc", mappingMetadata.type()); - assertEquals("{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}", mappingMetadata.source().string()); - } - @SuppressWarnings("unchecked") public void testGetIndexWithDefaults() throws IOException { String indexName = "get_index_test"; @@ -1207,33 +1094,6 @@ public void testRollover() throws IOException { } } - public void testRolloverWithTypes() throws IOException { - highLevelClient().indices().create(new CreateIndexRequest("test").alias(new Alias("alias")), RequestOptions.DEFAULT); - highLevelClient().index(new IndexRequest("test").id("1").source("field", "value"), RequestOptions.DEFAULT); - highLevelClient().index( - new IndexRequest("test").id("2").source("field", "value").setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL), - RequestOptions.DEFAULT - ); - - org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest = - new org.opensearch.action.admin.indices.rollover.RolloverRequest("alias", "test_new"); - rolloverRequest.addMaxIndexDocsCondition(1); - rolloverRequest.getCreateIndexRequest().mapping("_doc", "field2", "type=keyword"); - - org.opensearch.action.admin.indices.rollover.RolloverResponse rolloverResponse = execute( - rolloverRequest, - highLevelClient().indices()::rollover, - highLevelClient().indices()::rolloverAsync, - expectWarningsOnce(RestRolloverIndexAction.TYPES_DEPRECATION_MESSAGE) - ); - assertTrue(rolloverResponse.isRolledOver()); - assertFalse(rolloverResponse.isDryRun()); - Map conditionStatus = rolloverResponse.getConditionStatus(); - assertTrue(conditionStatus.get("[max_docs: 1]")); - assertEquals("test", rolloverResponse.getOldIndex()); - assertEquals("test_new", rolloverResponse.getNewIndex()); - } - public void testGetAlias() throws IOException { { createIndex("index1", Settings.EMPTY); @@ -1622,38 +1482,6 @@ public void testIndexPutSettingNonExistent() throws IOException { ); } - @SuppressWarnings("unchecked") - public void testPutTemplateWithTypes() throws Exception { - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplateRequest = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name("my-template") - .patterns(Arrays.asList("pattern-1", "name-*")) - .order(10) - .create(randomBoolean()) - .settings(Settings.builder().put("number_of_shards", "3").put("number_of_replicas", "0")) - .mapping("doc", "host_name", "type=keyword", "description", "type=text") - .alias(new Alias("alias-1").indexRouting("abc")) - .alias(new Alias("{index}-write").searchRouting("xyz")); - - AcknowledgedResponse putTemplateResponse = execute( - putTemplateRequest, - highLevelClient().indices()::putTemplate, - highLevelClient().indices()::putTemplateAsync, - expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(putTemplateResponse.isAcknowledged(), equalTo(true)); - - Map templates = getAsMap("/_template/my-template"); - assertThat(templates.keySet(), hasSize(1)); - assertThat(extractValue("my-template.order", templates), equalTo(10)); - assertThat(extractRawValues("my-template.index_patterns", templates), contains("pattern-1", "name-*")); - 
assertThat(extractValue("my-template.settings.index.number_of_shards", templates), equalTo("3")); - assertThat(extractValue("my-template.settings.index.number_of_replicas", templates), equalTo("0")); - assertThat(extractValue("my-template.mappings.properties.host_name.type", templates), equalTo("keyword")); - assertThat(extractValue("my-template.mappings.properties.description.type", templates), equalTo("text")); - assertThat((Map) extractValue("my-template.aliases.alias-1", templates), hasEntry("index_routing", "abc")); - assertThat((Map) extractValue("my-template.aliases.{index}-write", templates), hasEntry("search_routing", "xyz")); - } - @SuppressWarnings("unchecked") public void testPutTemplate() throws Exception { PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest("my-template").patterns( @@ -1746,48 +1574,6 @@ public void testPutTemplateWithTypesUsingUntypedAPI() throws Exception { ); } - @SuppressWarnings("unchecked") - public void testPutTemplateWithNoTypesUsingTypedApi() throws Exception { - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplateRequest = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name("my-template") - .patterns(Arrays.asList("pattern-1", "name-*")) - .order(10) - .create(randomBoolean()) - .settings(Settings.builder().put("number_of_shards", "3").put("number_of_replicas", "0")) - .mapping( - "my_doc_type", - // Note that the declared type is missing from the mapping - "{ " - + "\"properties\":{" - + "\"host_name\": {\"type\":\"keyword\"}," - + "\"description\": {\"type\":\"text\"}" - + "}" - + "}", - XContentType.JSON - ) - .alias(new Alias("alias-1").indexRouting("abc")) - .alias(new Alias("{index}-write").searchRouting("xyz")); - - AcknowledgedResponse putTemplateResponse = execute( - putTemplateRequest, - highLevelClient().indices()::putTemplate, - highLevelClient().indices()::putTemplateAsync, - expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(putTemplateResponse.isAcknowledged(), equalTo(true)); - - Map templates = getAsMap("/_template/my-template"); - assertThat(templates.keySet(), hasSize(1)); - assertThat(extractValue("my-template.order", templates), equalTo(10)); - assertThat(extractRawValues("my-template.index_patterns", templates), contains("pattern-1", "name-*")); - assertThat(extractValue("my-template.settings.index.number_of_shards", templates), equalTo("3")); - assertThat(extractValue("my-template.settings.index.number_of_replicas", templates), equalTo("0")); - assertThat(extractValue("my-template.mappings.properties.host_name.type", templates), equalTo("keyword")); - assertThat(extractValue("my-template.mappings.properties.description.type", templates), equalTo("text")); - assertThat((Map) extractValue("my-template.aliases.alias-1", templates), hasEntry("index_routing", "abc")); - assertThat((Map) extractValue("my-template.aliases.{index}-write", templates), hasEntry("search_routing", "xyz")); - } - public void testPutTemplateBadRequests() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1869,157 +1655,6 @@ public void testInvalidValidateQuery() throws IOException { assertFalse(response.isValid()); } - // Tests the deprecated form of the API that returns templates with doc types (using the server-side's GetIndexTemplateResponse) - public void testCRUDIndexTemplateWithTypes() throws Exception { - RestHighLevelClient client = highLevelClient(); - - 
org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplate1 = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name("template-1") - .patterns(Arrays.asList("pattern-1", "name-1")) - .alias(new Alias("alias-1")); - assertThat( - execute( - putTemplate1, - client.indices()::putTemplate, - client.indices()::putTemplateAsync, - expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).isAcknowledged(), - equalTo(true) - ); - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplate2 = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name("template-2") - .patterns(Arrays.asList("pattern-2", "name-2")) - .mapping("custom_doc_type", "name", "type=text") - .settings(Settings.builder().put("number_of_shards", "2").put("number_of_replicas", "0")); - assertThat( - execute( - putTemplate2, - client.indices()::putTemplate, - client.indices()::putTemplateAsync, - expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).isAcknowledged(), - equalTo(true) - ); - - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getTemplate1 = execute( - new GetIndexTemplatesRequest("template-1"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getTemplate1.getIndexTemplates(), hasSize(1)); - org.opensearch.cluster.metadata.IndexTemplateMetadata template1 = getTemplate1.getIndexTemplates().get(0); - assertThat(template1.name(), equalTo("template-1")); - assertThat(template1.patterns(), contains("pattern-1", "name-1")); - assertTrue(template1.aliases().containsKey("alias-1")); - - // Check the typed version of the call - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getTemplate2 = execute( - new GetIndexTemplatesRequest("template-2"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getTemplate2.getIndexTemplates(), hasSize(1)); - org.opensearch.cluster.metadata.IndexTemplateMetadata template2 = getTemplate2.getIndexTemplates().get(0); - assertThat(template2.name(), equalTo("template-2")); - assertThat(template2.patterns(), contains("pattern-2", "name-2")); - assertTrue(template2.aliases().isEmpty()); - assertThat(template2.settings().get("index.number_of_shards"), equalTo("2")); - assertThat(template2.settings().get("index.number_of_replicas"), equalTo("0")); - // Ugly deprecated form of API requires use of doc type to get at mapping object which is CompressedXContent - assertTrue(template2.mappings().containsKey("custom_doc_type")); - - List names = randomBoolean() ? 
Arrays.asList("*plate-1", "template-2") : Arrays.asList("template-*"); - GetIndexTemplatesRequest getBothRequest = new GetIndexTemplatesRequest(names); - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getBoth = execute( - getBothRequest, - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getBoth.getIndexTemplates(), hasSize(2)); - assertThat( - getBoth.getIndexTemplates().stream().map(org.opensearch.cluster.metadata.IndexTemplateMetadata::getName).toArray(), - arrayContainingInAnyOrder("template-1", "template-2") - ); - - GetIndexTemplatesRequest getAllRequest = new GetIndexTemplatesRequest(); - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getAll = execute( - getAllRequest, - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getAll.getIndexTemplates().size(), greaterThanOrEqualTo(2)); - assertThat( - getAll.getIndexTemplates() - .stream() - .map(org.opensearch.cluster.metadata.IndexTemplateMetadata::getName) - .collect(Collectors.toList()), - hasItems("template-1", "template-2") - ); - - assertTrue( - execute(new DeleteIndexTemplateRequest("template-1"), client.indices()::deleteTemplate, client.indices()::deleteTemplateAsync) - .isAcknowledged() - ); - assertThat( - expectThrows( - OpenSearchException.class, - () -> execute(new GetIndexTemplatesRequest("template-1"), client.indices()::getTemplate, client.indices()::getTemplateAsync) - ).status(), - equalTo(RestStatus.NOT_FOUND) - ); - assertThat( - expectThrows( - OpenSearchException.class, - () -> execute( - new DeleteIndexTemplateRequest("template-1"), - client.indices()::deleteTemplate, - client.indices()::deleteTemplateAsync - ) - ).status(), - equalTo(RestStatus.NOT_FOUND) - ); - - assertThat( - execute( - new GetIndexTemplatesRequest("template-*"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).getIndexTemplates(), - hasSize(1) - ); - assertThat( - execute( - new GetIndexTemplatesRequest("template-*"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).getIndexTemplates().get(0).name(), - equalTo("template-2") - ); - - assertTrue( - execute(new DeleteIndexTemplateRequest("template-*"), client.indices()::deleteTemplate, client.indices()::deleteTemplateAsync) - .isAcknowledged() - ); - assertThat( - expectThrows( - OpenSearchException.class, - () -> execute( - new GetIndexTemplatesRequest("template-*"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ) - ).status(), - equalTo(RestStatus.NOT_FOUND) - ); - } - public void testCRUDIndexTemplate() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java index 0ea2280b386eb..7276cbb44b030 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java @@ -96,7 +96,6 
@@ import static org.opensearch.index.RandomCreateIndexGenerator.randomAlias; import static org.opensearch.index.RandomCreateIndexGenerator.randomIndexSettings; import static org.opensearch.index.alias.RandomAliasActionsGenerator.randomAliasAction; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -140,40 +139,6 @@ public void testIndicesExistEmptyIndices() { ); } - public void testIndicesExistEmptyIndicesWithTypes() { - LuceneTestCase.expectThrows( - IllegalArgumentException.class, - () -> IndicesRequestConverters.indicesExist(new org.opensearch.action.admin.indices.get.GetIndexRequest()) - ); - LuceneTestCase.expectThrows( - IllegalArgumentException.class, - () -> IndicesRequestConverters.indicesExist( - new org.opensearch.action.admin.indices.get.GetIndexRequest().indices((String[]) null) - ) - ); - } - - public void testIndicesExistWithTypes() { - String[] indices = RequestConvertersTests.randomIndicesNames(1, 10); - - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest = - new org.opensearch.action.admin.indices.get.GetIndexRequest().indices(indices); - - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams); - RequestConvertersTests.setRandomLocal(getIndexRequest::local, expectedParams); - RequestConvertersTests.setRandomHumanReadable(getIndexRequest::humanReadable, expectedParams); - RequestConvertersTests.setRandomIncludeDefaults(getIndexRequest::includeDefaults, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - final Request request = IndicesRequestConverters.indicesExist(getIndexRequest); - - Assert.assertEquals(HttpHead.METHOD_NAME, request.getMethod()); - Assert.assertEquals("/" + String.join(",", indices), request.getEndpoint()); - Assert.assertThat(expectedParams, equalTo(request.getParameters())); - Assert.assertNull(request.getEntity()); - } - public void testCreateIndex() throws IOException { CreateIndexRequest createIndexRequest = RandomCreateIndexGenerator.randomCreateIndexRequest(); @@ -189,23 +154,6 @@ public void testCreateIndex() throws IOException { RequestConvertersTests.assertToXContentBody(createIndexRequest, request.getEntity()); } - public void testCreateIndexWithTypes() throws IOException { - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest = org.opensearch.index.RandomCreateIndexGenerator - .randomCreateIndexRequest(); - - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomTimeout(createIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - RequestConvertersTests.setRandomMasterTimeout(createIndexRequest, expectedParams); - RequestConvertersTests.setRandomWaitForActiveShards(createIndexRequest::waitForActiveShards, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - Request request = IndicesRequestConverters.createIndex(createIndexRequest); - Assert.assertEquals("/" + createIndexRequest.index(), request.getEndpoint()); - Assert.assertEquals(expectedParams, request.getParameters()); - Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod()); - RequestConvertersTests.assertToXContentBody(createIndexRequest, request.getEntity()); - } - public void testCreateIndexNullIndex() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new 
CreateIndexRequest(null)); assertEquals(e.getMessage(), "The index name cannot be null."); @@ -254,37 +202,6 @@ public void testPutMapping() throws IOException { RequestConvertersTests.assertToXContentBody(putMappingRequest, request.getEntity()); } - public void testPutMappingWithTypes() throws IOException { - org.opensearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest = - new org.opensearch.action.admin.indices.mapping.put.PutMappingRequest(); - - String[] indices = RequestConvertersTests.randomIndicesNames(0, 5); - putMappingRequest.indices(indices); - - String type = OpenSearchTestCase.randomAlphaOfLengthBetween(3, 10); - putMappingRequest.type(type); - - Map expectedParams = new HashMap<>(); - - RequestConvertersTests.setRandomTimeout(putMappingRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - RequestConvertersTests.setRandomMasterTimeout(putMappingRequest, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - Request request = IndicesRequestConverters.putMapping(putMappingRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - String index = String.join(",", indices); - if (Strings.hasLength(index)) { - endpoint.add(index); - } - endpoint.add("_mapping"); - endpoint.add(type); - Assert.assertEquals(endpoint.toString(), request.getEndpoint()); - - Assert.assertEquals(expectedParams, request.getParameters()); - Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod()); - RequestConvertersTests.assertToXContentBody(putMappingRequest, request.getEntity()); - } - public void testGetMapping() { GetMappingsRequest getMappingRequest = new GetMappingsRequest(); @@ -318,53 +235,6 @@ public void testGetMapping() { Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); } - public void testGetMappingWithTypes() { - org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest getMappingRequest = - new org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest(); - - String[] indices = Strings.EMPTY_ARRAY; - if (randomBoolean()) { - indices = RequestConvertersTests.randomIndicesNames(0, 5); - getMappingRequest.indices(indices); - } else if (randomBoolean()) { - getMappingRequest.indices((String[]) null); - } - - String type = null; - if (randomBoolean()) { - type = randomAlphaOfLengthBetween(3, 10); - getMappingRequest.types(type); - } else if (randomBoolean()) { - getMappingRequest.types((String[]) null); - } - - Map expectedParams = new HashMap<>(); - - RequestConvertersTests.setRandomIndicesOptions( - getMappingRequest::indicesOptions, - getMappingRequest::indicesOptions, - expectedParams - ); - RequestConvertersTests.setRandomMasterTimeout(getMappingRequest, expectedParams); - RequestConvertersTests.setRandomLocal(getMappingRequest::local, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - Request request = IndicesRequestConverters.getMappings(getMappingRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - String index = String.join(",", indices); - if (Strings.hasLength(index)) { - endpoint.add(index); - } - endpoint.add("_mapping"); - if (type != null) { - endpoint.add(type); - } - Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - - Assert.assertThat(expectedParams, equalTo(request.getParameters())); - Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); - } - public void testGetFieldMapping() { GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest(); @@ -410,67 
+280,6 @@ public void testGetFieldMapping() { Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); } - public void testGetFieldMappingWithTypes() { - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest = - new org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest(); - - String[] indices = Strings.EMPTY_ARRAY; - if (randomBoolean()) { - indices = RequestConvertersTests.randomIndicesNames(0, 5); - getFieldMappingsRequest.indices(indices); - } else if (randomBoolean()) { - getFieldMappingsRequest.indices((String[]) null); - } - - String type = null; - if (randomBoolean()) { - type = randomAlphaOfLengthBetween(3, 10); - getFieldMappingsRequest.types(type); - } else if (randomBoolean()) { - getFieldMappingsRequest.types((String[]) null); - } - - String[] fields = null; - if (randomBoolean()) { - fields = new String[randomIntBetween(1, 5)]; - for (int i = 0; i < fields.length; i++) { - fields[i] = randomAlphaOfLengthBetween(3, 10); - } - getFieldMappingsRequest.fields(fields); - } else if (randomBoolean()) { - getFieldMappingsRequest.fields((String[]) null); - } - - Map expectedParams = new HashMap<>(); - - RequestConvertersTests.setRandomIndicesOptions( - getFieldMappingsRequest::indicesOptions, - getFieldMappingsRequest::indicesOptions, - expectedParams - ); - RequestConvertersTests.setRandomLocal(getFieldMappingsRequest::local, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - Request request = IndicesRequestConverters.getFieldMapping(getFieldMappingsRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - String index = String.join(",", indices); - if (Strings.hasLength(index)) { - endpoint.add(index); - } - endpoint.add("_mapping"); - if (type != null) { - endpoint.add(type); - } - endpoint.add("field"); - if (fields != null) { - endpoint.add(String.join(",", fields)); - } - Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - - Assert.assertThat(expectedParams, equalTo(request.getParameters())); - Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); - } - public void testPutDataStream() { String name = randomAlphaOfLength(10); CreateDataStreamRequest createDataStreamRequest = new CreateDataStreamRequest(name); @@ -603,41 +412,6 @@ public void testGetIndex() throws IOException { Assert.assertThat(request.getEntity(), nullValue()); } - public void testGetIndexWithTypes() throws IOException { - String[] indicesUnderTest = OpenSearchTestCase.randomBoolean() ? 
null : RequestConvertersTests.randomIndicesNames(0, 5); - - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest = - new org.opensearch.action.admin.indices.get.GetIndexRequest().indices(indicesUnderTest); - - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(getIndexRequest, expectedParams); - RequestConvertersTests.setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams); - RequestConvertersTests.setRandomLocal(getIndexRequest::local, expectedParams); - RequestConvertersTests.setRandomHumanReadable(getIndexRequest::humanReadable, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - if (OpenSearchTestCase.randomBoolean()) { - // the request object will not have include_defaults present unless it is set to - // true - getIndexRequest.includeDefaults(OpenSearchTestCase.randomBoolean()); - if (getIndexRequest.includeDefaults()) { - expectedParams.put("include_defaults", Boolean.toString(true)); - } - } - - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indicesUnderTest != null && indicesUnderTest.length > 0) { - endpoint.add(String.join(",", indicesUnderTest)); - } - - Request request = IndicesRequestConverters.getIndex(getIndexRequest); - - Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - Assert.assertThat(request.getParameters(), equalTo(expectedParams)); - Assert.assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); - Assert.assertThat(request.getEntity(), nullValue()); - } - public void testDeleteIndexEmptyIndices() { String[] indices = OpenSearchTestCase.randomBoolean() ? null : Strings.EMPTY_ARRAY; ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate(); @@ -984,51 +758,6 @@ public void testRollover() throws IOException { Assert.assertEquals(expectedParams, request.getParameters()); } - public void testRolloverWithTypes() throws IOException { - org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest = - new org.opensearch.action.admin.indices.rollover.RolloverRequest( - OpenSearchTestCase.randomAlphaOfLengthBetween(3, 10), - OpenSearchTestCase.randomBoolean() ? 
null : OpenSearchTestCase.randomAlphaOfLengthBetween(3, 10) - ); - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomTimeout(rolloverRequest::timeout, rolloverRequest.timeout(), expectedParams); - RequestConvertersTests.setRandomMasterTimeout(rolloverRequest, expectedParams); - if (OpenSearchTestCase.randomBoolean()) { - rolloverRequest.dryRun(OpenSearchTestCase.randomBoolean()); - if (rolloverRequest.isDryRun()) { - expectedParams.put("dry_run", "true"); - } - } - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - if (OpenSearchTestCase.randomBoolean()) { - rolloverRequest.addMaxIndexAgeCondition(new TimeValue(OpenSearchTestCase.randomNonNegativeLong())); - } - if (OpenSearchTestCase.randomBoolean()) { - String type = OpenSearchTestCase.randomAlphaOfLengthBetween(3, 10); - rolloverRequest.getCreateIndexRequest().mapping(type, org.opensearch.index.RandomCreateIndexGenerator.randomMapping(type)); - } - if (OpenSearchTestCase.randomBoolean()) { - org.opensearch.index.RandomCreateIndexGenerator.randomAliases(rolloverRequest.getCreateIndexRequest()); - } - if (OpenSearchTestCase.randomBoolean()) { - rolloverRequest.getCreateIndexRequest().settings(org.opensearch.index.RandomCreateIndexGenerator.randomIndexSettings()); - } - RequestConvertersTests.setRandomWaitForActiveShards(rolloverRequest.getCreateIndexRequest()::waitForActiveShards, expectedParams); - - Request request = IndicesRequestConverters.rollover(rolloverRequest); - if (rolloverRequest.getNewIndexName() == null) { - Assert.assertEquals("/" + rolloverRequest.getRolloverTarget() + "/_rollover", request.getEndpoint()); - } else { - Assert.assertEquals( - "/" + rolloverRequest.getRolloverTarget() + "/_rollover/" + rolloverRequest.getNewIndexName(), - request.getEndpoint() - ); - } - Assert.assertEquals(HttpPost.METHOD_NAME, request.getMethod()); - RequestConvertersTests.assertToXContentBody(rolloverRequest, request.getEntity()); - Assert.assertEquals(expectedParams, request.getParameters()); - } - public void testGetAlias() { GetAliasesRequest getAliasesRequest = new GetAliasesRequest(); @@ -1093,57 +822,6 @@ public void testIndexPutSettings() throws IOException { Assert.assertEquals(expectedParams, request.getParameters()); } - public void testPutTemplateRequestWithTypes() throws Exception { - Map names = new HashMap<>(); - names.put("log", "log"); - names.put("template#1", "template%231"); - names.put("-#template", "-%23template"); - names.put("foo^bar", "foo%5Ebar"); - - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplateRequest = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name( - OpenSearchTestCase.randomFrom(names.keySet()) - ).patterns(Arrays.asList(OpenSearchTestCase.generateRandomStringArray(20, 100, false, false))); - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.order(OpenSearchTestCase.randomInt()); - } - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.version(OpenSearchTestCase.randomInt()); - } - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.settings( - Settings.builder().put("setting-" + OpenSearchTestCase.randomInt(), OpenSearchTestCase.randomTimeValue()) - ); - } - Map expectedParams = new HashMap<>(); - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.mapping( - "doc-" + OpenSearchTestCase.randomInt(), - "field-" + OpenSearchTestCase.randomInt(), - "type=" + OpenSearchTestCase.randomFrom("text", "keyword") - ); - } - 
expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.alias(new Alias("alias-" + OpenSearchTestCase.randomInt())); - } - if (OpenSearchTestCase.randomBoolean()) { - expectedParams.put("create", Boolean.TRUE.toString()); - putTemplateRequest.create(true); - } - if (OpenSearchTestCase.randomBoolean()) { - String cause = OpenSearchTestCase.randomUnicodeOfCodepointLengthBetween(1, 50); - putTemplateRequest.cause(cause); - expectedParams.put("cause", cause); - } - RequestConvertersTests.setRandomMasterTimeout(putTemplateRequest, expectedParams); - - Request request = IndicesRequestConverters.putTemplate(putTemplateRequest); - Assert.assertThat(request.getEndpoint(), equalTo("/_template/" + names.get(putTemplateRequest.name()))); - Assert.assertThat(request.getParameters(), equalTo(expectedParams)); - RequestConvertersTests.assertToXContentBody(putTemplateRequest, request.getEntity()); - } - public void testPutTemplateRequest() throws Exception { Map names = new HashMap<>(); names.put("log", "log"); @@ -1198,7 +876,6 @@ public void testPutTemplateRequest() throws Exception { public void testValidateQuery() throws Exception { String[] indices = OpenSearchTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5); - String[] types = OpenSearchTestCase.randomBoolean() ? OpenSearchTestCase.generateRandomStringArray(5, 5, false, false) : null; ValidateQueryRequest validateQueryRequest; if (OpenSearchTestCase.randomBoolean()) { validateQueryRequest = new ValidateQueryRequest(indices); @@ -1206,7 +883,6 @@ public void testValidateQuery() throws Exception { validateQueryRequest = new ValidateQueryRequest(); validateQueryRequest.indices(indices); } - validateQueryRequest.types(types); Map expectedParams = new HashMap<>(); RequestConvertersTests.setRandomIndicesOptions( validateQueryRequest::indicesOptions, @@ -1223,9 +899,6 @@ public void testValidateQuery() throws Exception { StringJoiner endpoint = new StringJoiner("/", "/", ""); if (indices != null && indices.length > 0) { endpoint.add(String.join(",", indices)); - if (types != null && types.length > 0) { - endpoint.add(String.join(",", types)); - } } endpoint.add("_validate/query"); Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString())); @@ -1247,8 +920,7 @@ public void testGetTemplateRequest() throws Exception { RequestConvertersTests.setRandomMasterTimeout(getTemplatesRequest::setMasterNodeTimeout, expectedParams); RequestConvertersTests.setRandomLocal(getTemplatesRequest::setLocal, expectedParams); - Request request = IndicesRequestConverters.getTemplatesWithDocumentTypes(getTemplatesRequest); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); + Request request = IndicesRequestConverters.getTemplates(getTemplatesRequest); Assert.assertThat( request.getEndpoint(), equalTo("/_template/" + names.stream().map(encodes::get).collect(Collectors.joining(","))) diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java index 2141ce30dce64..37f4d95d5f4d0 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java @@ -33,19 +33,17 @@ package org.opensearch.client.indices; import org.apache.lucene.util.CollectionUtil; +import 
org.opensearch.client.AbstractResponseTestCase; import org.opensearch.client.GetAliasesResponseTests; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.ToXContent.Params; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.RandomCreateIndexGenerator; import org.opensearch.index.mapper.MapperService; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; import java.util.ArrayList; @@ -57,40 +55,18 @@ import java.util.Map; import java.util.Objects; -import static org.opensearch.test.AbstractXContentTestCase.xContentTester; - -public class GetIndexResponseTests extends OpenSearchTestCase { - - // Because the client-side class does not have a toXContent method, we test xContent serialization by creating - // a random client object, converting it to a server object then serializing it to xContent, and finally - // parsing it back as a client object. We check equality between the original client object, and the parsed one. - public void testFromXContent() throws IOException { - xContentTester( - this::createParser, - GetIndexResponseTests::createTestInstance, - GetIndexResponseTests::toXContent, - GetIndexResponse::fromXContent - ).supportsUnknownFields(false) - .assertToXContentEquivalence(false) - .assertEqualsConsumer(GetIndexResponseTests::assertEqualInstances) - .test(); - } - - private static void assertEqualInstances(GetIndexResponse expected, GetIndexResponse actual) { - assertArrayEquals(expected.getIndices(), actual.getIndices()); - assertEquals(expected.getMappings(), actual.getMappings()); - assertEquals(expected.getSettings(), actual.getSettings()); - assertEquals(expected.getDefaultSettings(), actual.getDefaultSettings()); - assertEquals(expected.getAliases(), actual.getAliases()); - } +public class GetIndexResponseTests extends AbstractResponseTestCase< + org.opensearch.action.admin.indices.get.GetIndexResponse, + GetIndexResponse> { - private static GetIndexResponse createTestInstance() { + @Override + protected org.opensearch.action.admin.indices.get.GetIndexResponse createServerTestInstance(XContentType xContentType) { String[] indices = generateRandomStringArray(5, 5, false, false); - Map mappings = new HashMap<>(); - Map> aliases = new HashMap<>(); - Map settings = new HashMap<>(); - Map defaultSettings = new HashMap<>(); - Map dataStreams = new HashMap<>(); + ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder settings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder defaultSettings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder dataStreams = ImmutableOpenMap.builder(); IndexScopedSettings indexScopedSettings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS; boolean includeDefaults = randomBoolean(); for (String index : indices) { @@ -116,17 +92,36 @@ private static GetIndexResponse createTestInstance() { dataStreams.put(index, randomAlphaOfLength(5).toLowerCase(Locale.ROOT)); } } - return new GetIndexResponse(indices, mappings, aliases, settings, 
defaultSettings, dataStreams); + return new org.opensearch.action.admin.indices.get.GetIndexResponse( + indices, + mappings.build(), + aliases.build(), + settings.build(), + defaultSettings.build(), + dataStreams.build() + ); + } + + @Override + protected GetIndexResponse doParseToClientInstance(XContentParser parser) throws IOException { + return GetIndexResponse.fromXContent(parser); + } + + @Override + protected void assertInstances( + org.opensearch.action.admin.indices.get.GetIndexResponse serverTestInstance, + GetIndexResponse clientInstance + ) { + assertArrayEquals(serverTestInstance.getIndices(), clientInstance.getIndices()); + assertMapEquals(serverTestInstance.getMappings(), clientInstance.getMappings()); + assertMapEquals(serverTestInstance.getSettings(), clientInstance.getSettings()); + assertMapEquals(serverTestInstance.defaultSettings(), clientInstance.getDefaultSettings()); + assertMapEquals(serverTestInstance.getAliases(), clientInstance.getAliases()); } private static MappingMetadata createMappingsForIndex() { int typeCount = rarely() ? 0 : 1; - MappingMetadata mmd; - try { - mmd = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, Collections.emptyMap()); - } catch (IOException e) { - throw new RuntimeException(e); - } + MappingMetadata mmd = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, Collections.emptyMap()); for (int i = 0; i < typeCount; i++) { if (rarely() == false) { // rarely have no fields Map mappings = new HashMap<>(); @@ -135,12 +130,8 @@ private static MappingMetadata createMappingsForIndex() { mappings.put("field2-" + i, randomFieldMapping()); } - try { - String typeName = MapperService.SINGLE_MAPPING_NAME; - mmd = new MappingMetadata(typeName, mappings); - } catch (IOException e) { - fail("shouldn't have failed " + e); - } + String typeName = MapperService.SINGLE_MAPPING_NAME; + mmd = new MappingMetadata(typeName, mappings); } } return mmd; @@ -178,39 +169,4 @@ private static Map randomFieldMapping() { } return mappings; } - - private static void toXContent(GetIndexResponse response, XContentBuilder builder) throws IOException { - // first we need to repackage from GetIndexResponse to org.opensearch.action.admin.indices.get.GetIndexResponse - ImmutableOpenMap.Builder> allMappings = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder settings = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder defaultSettings = ImmutableOpenMap.builder(); - - Map indexMappings = response.getMappings(); - for (String index : response.getIndices()) { - MappingMetadata mmd = indexMappings.get(index); - ImmutableOpenMap.Builder typedMappings = ImmutableOpenMap.builder(); - if (mmd != null) { - typedMappings.put(MapperService.SINGLE_MAPPING_NAME, mmd); - } - allMappings.put(index, typedMappings.build()); - aliases.put(index, response.getAliases().get(index)); - settings.put(index, response.getSettings().get(index)); - defaultSettings.put(index, response.getDefaultSettings().get(index)); - } - - org.opensearch.action.admin.indices.get.GetIndexResponse serverResponse = - new org.opensearch.action.admin.indices.get.GetIndexResponse( - response.getIndices(), - allMappings.build(), - aliases.build(), - settings.build(), - defaultSettings.build(), - ImmutableOpenMap.builder().build() - ); - - // then we can call its toXContent method, forcing no output of types - Params params = new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "false")); - 
serverResponse.toXContent(builder, params); - } } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java index 4049fcb41df99..b28da63e4344a 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java @@ -50,7 +50,6 @@ import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; @@ -196,13 +195,9 @@ static GetIndexTemplatesResponse createTestInstance() { templateBuilder.version(between(0, 100)); } if (randomBoolean()) { - try { - Map map = XContentHelper.convertToMap(new BytesArray(mappingString), true, XContentType.JSON).v2(); - MappingMetadata mapping = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, map); - templateBuilder.mapping(mapping); - } catch (IOException ex) { - throw new UncheckedIOException(ex); - } + Map map = XContentHelper.convertToMap(new BytesArray(mappingString), true, XContentType.JSON).v2(); + MappingMetadata mapping = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, map); + templateBuilder.mapping(mapping); } templates.add(templateBuilder.build()); } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetMappingsResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetMappingsResponseTests.java index 817bce359b7d7..cb62b116de020 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetMappingsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetMappingsResponseTests.java @@ -32,70 +32,54 @@ package org.opensearch.client.indices; +import org.opensearch.client.AbstractResponseTestCase; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.ToXContent.Params; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.MapperService; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Objects; -import java.util.function.Predicate; -import static org.opensearch.client.indices.GetMappingsResponse.MAPPINGS; -import static org.opensearch.test.AbstractXContentTestCase.xContentTester; +public class GetMappingsResponseTests extends AbstractResponseTestCase< + org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse, + GetMappingsResponse> { -public class GetMappingsResponseTests extends OpenSearchTestCase { - - // Because the client-side class does not have a toXContent method, we test xContent serialization by creating - // a random client object, converting it to a server object then serializing it to xContent, and finally - // parsing it back as a client object. We check equality between the original client object, and the parsed one. 
- public void testFromXContent() throws IOException { - xContentTester( - this::createParser, - GetMappingsResponseTests::createTestInstance, - GetMappingsResponseTests::toXContent, - GetMappingsResponse::fromXContent - ).supportsUnknownFields(true) - .assertEqualsConsumer(GetMappingsResponseTests::assertEqualInstances) - .randomFieldsExcludeFilter(randomFieldsExcludeFilter()) - .test(); - } - - private static GetMappingsResponse createTestInstance() { - Map mappings = Collections.singletonMap("index-" + randomAlphaOfLength(5), randomMappingMetadata()); - return new GetMappingsResponse(mappings); + @Override + protected org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse createServerTestInstance(XContentType xContentType) { + ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); + int numberOfIndexes = randomIntBetween(1, 5); + for (int i = 0; i < numberOfIndexes; i++) { + mappings.put("index-" + randomAlphaOfLength(5), randomMappingMetadata()); + } + return new org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse(mappings.build()); } - private static void assertEqualInstances(GetMappingsResponse expected, GetMappingsResponse actual) { - assertEquals(expected.mappings(), actual.mappings()); + @Override + protected GetMappingsResponse doParseToClientInstance(XContentParser parser) throws IOException { + return GetMappingsResponse.fromXContent(parser); } - private Predicate randomFieldsExcludeFilter() { - return field -> !field.equals(MAPPINGS.getPreferredName()); + @Override + protected void assertInstances( + org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse serverTestInstance, + GetMappingsResponse clientInstance + ) { + assertMapEquals(serverTestInstance.getMappings(), clientInstance.mappings()); } public static MappingMetadata randomMappingMetadata() { Map mappings = new HashMap<>(); - if (frequently()) { // rarely have no fields mappings.put("field1", randomFieldMapping()); if (randomBoolean()) { mappings.put("field2", randomFieldMapping()); } } - - try { - return new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, mappings); - } catch (IOException e) { - throw new RuntimeException(e); - } + return new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, mappings); } private static Map randomFieldMapping() { @@ -110,22 +94,4 @@ private static Map randomFieldMapping() { } return mappings; } - - private static void toXContent(GetMappingsResponse response, XContentBuilder builder) throws IOException { - Params params = new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "false")); - ImmutableOpenMap.Builder> allMappings = ImmutableOpenMap.builder(); - - for (Map.Entry indexEntry : response.mappings().entrySet()) { - ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); - mappings.put(MapperService.SINGLE_MAPPING_NAME, indexEntry.getValue()); - allMappings.put(indexEntry.getKey(), mappings.build()); - } - - org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse serverResponse = - new org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse(allMappings.build()); - - builder.startObject(); - serverResponse.toXContent(builder, params); - builder.endObject(); - } } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java index 7577aa66bfcde..0c924bc06046c 100644 --- 
a/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java @@ -38,11 +38,8 @@ import org.opensearch.action.admin.indices.rollover.MaxSizeCondition; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.common.xcontent.ToXContent.Params; import java.io.IOException; import java.util.ArrayList; @@ -51,7 +48,6 @@ import java.util.Map; import java.util.function.Predicate; import java.util.function.Supplier; -import java.util.Collections; import static org.opensearch.test.AbstractXContentTestCase.xContentTester; @@ -94,7 +90,6 @@ private Predicate getRandomFieldsExcludeFilter() { } private static void toXContent(RolloverResponse response, XContentBuilder builder) throws IOException { - Params params = new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "false")); org.opensearch.action.admin.indices.rollover.RolloverResponse serverResponse = new org.opensearch.action.admin.indices.rollover.RolloverResponse( response.getOldIndex(), @@ -105,6 +100,6 @@ private static void toXContent(RolloverResponse response, XContentBuilder builde response.isAcknowledged(), response.isShardsAcknowledged() ); - serverResponse.toXContent(builder, params); + serverResponse.toXContent(builder, null); } } diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 2271fed252793..5c1252061443a 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -33,8 +33,8 @@ import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis apply plugin: 'opensearch.build' apply plugin: 'opensearch.publish' -targetCompatibility = JavaVersion.VERSION_1_8 -sourceCompatibility = JavaVersion.VERSION_1_8 +targetCompatibility = JavaVersion.VERSION_11 +sourceCompatibility = JavaVersion.VERSION_11 group = 'org.opensearch.client' archivesBaseName = 'opensearch-rest-client' diff --git a/client/rest/src/main/java/org/opensearch/client/NodeSelector.java b/client/rest/src/main/java/org/opensearch/client/NodeSelector.java index 398a3a72b9414..09d5a2c1fe576 100644 --- a/client/rest/src/main/java/org/opensearch/client/NodeSelector.java +++ b/client/rest/src/main/java/org/opensearch/client/NodeSelector.java @@ -48,7 +48,7 @@ public interface NodeSelector { * iterate the nodes as many times as they need. *
<p>
* This may be called twice per request: first for "living" nodes that - * have not been blacklisted by previous errors. If the selector removes + * have not been denylisted by previous errors. If the selector removes * all nodes from the list or if there aren't any living nodes then the * {@link RestClient} will call this method with a list of "dead" nodes. *
<p>
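The hunk above only renames terminology, but the selector contract it documents is easy to miss in diff form: implementers mutate the `Iterable` in place, and the client may call the selector a second time with the "dead" (denylisted) nodes that the `RestClient` changes below manage. A minimal sketch of a custom selector is shown here purely for context; it is not part of this change, and the `zone` attribute name and the `ZoneNodeSelector` class are assumptions for illustration (conceptually close to the client's `HasAttributeNodeSelector`).

```java
import java.util.Iterator;
import java.util.List;

import org.opensearch.client.Node;
import org.opensearch.client.NodeSelector;

/**
 * Illustrative only: keep nodes whose "zone" attribute matches a preferred zone,
 * removing everything else from the mutable Iterable the client passes in.
 */
public class ZoneNodeSelector implements NodeSelector {

    private final String preferredZone;

    public ZoneNodeSelector(String preferredZone) {
        this.preferredZone = preferredZone;
    }

    @Override
    public void select(Iterable<Node> nodes) {
        Iterator<Node> iterator = nodes.iterator();
        while (iterator.hasNext()) {
            Node node = iterator.next();
            // Attributes may be absent if sniffing has not populated them; in that
            // case leave the node in place rather than reject every host.
            if (node.getAttributes() == null) {
                continue;
            }
            List<String> zones = node.getAttributes().get("zone");
            if (zones == null || !zones.contains(preferredZone)) {
                iterator.remove();
            }
        }
    }
}
```

Such a selector would be registered through `RestClientBuilder#setNodeSelector`, the same hook the tests further down exercise with `NodeSelector.ANY`; because it can reject every living node, the denylist fallback in `RestClient.selectNodes` (next hunk) is what keeps requests flowing.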
diff --git a/client/rest/src/main/java/org/opensearch/client/RestClient.java b/client/rest/src/main/java/org/opensearch/client/RestClient.java index c004613f89b7f..4f899fd709112 100644 --- a/client/rest/src/main/java/org/opensearch/client/RestClient.java +++ b/client/rest/src/main/java/org/opensearch/client/RestClient.java @@ -125,7 +125,7 @@ public class RestClient implements Closeable { final List
<Header>
defaultHeaders; private final String pathPrefix; private final AtomicInteger lastNodeIndex = new AtomicInteger(0); - private final ConcurrentMap blacklist = new ConcurrentHashMap<>(); + private final ConcurrentMap denylist = new ConcurrentHashMap<>(); private final FailureListener failureListener; private final NodeSelector nodeSelector; private volatile NodeTuple> nodeTuple; @@ -246,7 +246,7 @@ public synchronized void setNodes(Collection nodes) { authCache.put(node.getHost(), new BasicScheme()); } this.nodeTuple = new NodeTuple<>(Collections.unmodifiableList(new ArrayList<>(nodesByHost.values())), authCache); - this.blacklist.clear(); + this.denylist.clear(); } /** @@ -448,7 +448,7 @@ public void cancelled() { */ private NodeTuple> nextNodes() throws IOException { NodeTuple> nodeTuple = this.nodeTuple; - Iterable hosts = selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector); + Iterable hosts = selectNodes(nodeTuple, denylist, lastNodeIndex, nodeSelector); return new NodeTuple<>(hosts.iterator(), nodeTuple.authCache); } @@ -458,17 +458,17 @@ private NodeTuple> nextNodes() throws IOException { */ static Iterable selectNodes( NodeTuple> nodeTuple, - Map blacklist, + Map denylist, AtomicInteger lastNodeIndex, NodeSelector nodeSelector ) throws IOException { /* * Sort the nodes into living and dead lists. */ - List livingNodes = new ArrayList<>(Math.max(0, nodeTuple.nodes.size() - blacklist.size())); - List deadNodes = new ArrayList<>(blacklist.size()); + List livingNodes = new ArrayList<>(Math.max(0, nodeTuple.nodes.size() - denylist.size())); + List deadNodes = new ArrayList<>(denylist.size()); for (Node node : nodeTuple.nodes) { - DeadHostState deadness = blacklist.get(node.getHost()); + DeadHostState deadness = denylist.get(node.getHost()); if (deadness == null || deadness.shallBeRetried()) { livingNodes.add(node); } else { @@ -526,9 +526,9 @@ static Iterable selectNodes( * Receives as an argument the host that was used for the successful request. */ private void onResponse(Node node) { - DeadHostState removedHost = this.blacklist.remove(node.getHost()); + DeadHostState removedHost = this.denylist.remove(node.getHost()); if (logger.isDebugEnabled() && removedHost != null) { - logger.debug("removed [" + node + "] from blacklist"); + logger.debug("removed [" + node + "] from denylist"); } } @@ -538,19 +538,19 @@ private void onResponse(Node node) { */ private void onFailure(Node node) { while (true) { - DeadHostState previousDeadHostState = blacklist.putIfAbsent( + DeadHostState previousDeadHostState = denylist.putIfAbsent( node.getHost(), new DeadHostState(DeadHostState.DEFAULT_TIME_SUPPLIER) ); if (previousDeadHostState == null) { if (logger.isDebugEnabled()) { - logger.debug("added [" + node + "] to blacklist"); + logger.debug("added [" + node + "] to denylist"); } break; } - if (blacklist.replace(node.getHost(), previousDeadHostState, new DeadHostState(previousDeadHostState))) { + if (denylist.replace(node.getHost(), previousDeadHostState, new DeadHostState(previousDeadHostState))) { if (logger.isDebugEnabled()) { - logger.debug("updated [" + node + "] already in blacklist"); + logger.debug("updated [" + node + "] already in denylist"); } break; } @@ -718,8 +718,8 @@ static class NodeTuple { } /** - * Contains a reference to a blacklisted node and the time until it is - * revived. We use this so we can do a single pass over the blacklist. + * Contains a reference to a denylisted node and the time until it is + * revived. We use this so we can do a single pass over the denylist. 
*/ private static class DeadNode implements Comparable { final Node node; diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java index 0011622fe24b0..0b7d2881ccb54 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java @@ -62,7 +62,7 @@ import static org.junit.Assert.fail; /** - * Tests for {@link RestClient} behaviour against multiple hosts: fail-over, blacklisting etc. + * Tests for {@link RestClient} behaviour against multiple hosts: fail-over, denylisting etc. * Relies on a mock http client to intercept requests and return desired responses based on request path. */ public class RestClientMultipleHostsTests extends RestClientTestCase { @@ -154,7 +154,7 @@ public void testRoundRobinRetryErrors() throws Exception { fail("request should have failed"); } catch (ResponseException e) { Set hostsSet = hostsSet(); - // first request causes all the hosts to be blacklisted, the returned exception holds one suppressed exception each + // first request causes all the hosts to be denylisted, the returned exception holds one suppressed exception each failureListener.assertCalled(nodes); do { Response response = e.getResponse(); @@ -175,7 +175,7 @@ public void testRoundRobinRetryErrors() throws Exception { assertEquals("every host should have been used but some weren't: " + hostsSet, 0, hostsSet.size()); } catch (IOException e) { Set hostsSet = hostsSet(); - // first request causes all the hosts to be blacklisted, the returned exception holds one suppressed exception each + // first request causes all the hosts to be denylisted, the returned exception holds one suppressed exception each failureListener.assertCalled(nodes); do { HttpHost httpHost = HttpHost.create(e.getMessage()); @@ -211,13 +211,13 @@ public void testRoundRobinRetryErrors() throws Exception { "host [" + response.getHost() + "] not found, most likely used multiple times", hostsSet.remove(response.getHost()) ); - // after the first request, all hosts are blacklisted, a single one gets resurrected each time + // after the first request, all hosts are denylisted, a single one gets resurrected each time failureListener.assertCalled(response.getHost()); assertEquals(0, e.getSuppressed().length); } catch (IOException e) { HttpHost httpHost = HttpHost.create(e.getMessage()); assertTrue("host [" + httpHost + "] not found, most likely used multiple times", hostsSet.remove(httpHost)); - // after the first request, all hosts are blacklisted, a single one gets resurrected each time + // after the first request, all hosts are denylisted, a single one gets resurrected each time failureListener.assertCalled(httpHost); assertEquals(0, e.getSuppressed().length); } diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientTests.java index 169e2dbcfd8c5..ca761dcb6b9b6 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientTests.java @@ -260,11 +260,11 @@ public String toString() { NodeTuple> nodeTuple = new NodeTuple<>(Arrays.asList(n1, n2, n3), null); - Map emptyBlacklist = Collections.emptyMap(); + Map emptyDenylist = Collections.emptyMap(); // Normal cases where the node selector doesn't reject all living nodes - 
assertSelectLivingHosts(Arrays.asList(n1, n2, n3), nodeTuple, emptyBlacklist, NodeSelector.ANY); - assertSelectLivingHosts(Arrays.asList(n2, n3), nodeTuple, emptyBlacklist, not1); + assertSelectLivingHosts(Arrays.asList(n1, n2, n3), nodeTuple, emptyDenylist, NodeSelector.ANY); + assertSelectLivingHosts(Arrays.asList(n2, n3), nodeTuple, emptyDenylist, not1); /* * Try a NodeSelector that excludes all nodes. This should @@ -274,83 +274,83 @@ public String toString() { String message = "NodeSelector [NONE] rejected all nodes, living [" + "[host=http://1, version=1], [host=http://2, version=2], " + "[host=http://3, version=3]] and dead []"; - assertEquals(message, assertSelectAllRejected(nodeTuple, emptyBlacklist, noNodes)); + assertEquals(message, assertSelectAllRejected(nodeTuple, emptyDenylist, noNodes)); } // Mark all the nodes dead for a few test cases { final AtomicLong time = new AtomicLong(0L); Supplier timeSupplier = time::get; - Map blacklist = new HashMap<>(); - blacklist.put(n1.getHost(), new DeadHostState(timeSupplier)); - blacklist.put(n2.getHost(), new DeadHostState(new DeadHostState(timeSupplier))); - blacklist.put(n3.getHost(), new DeadHostState(new DeadHostState(new DeadHostState(timeSupplier)))); + Map denylist = new HashMap<>(); + denylist.put(n1.getHost(), new DeadHostState(timeSupplier)); + denylist.put(n2.getHost(), new DeadHostState(new DeadHostState(timeSupplier))); + denylist.put(n3.getHost(), new DeadHostState(new DeadHostState(new DeadHostState(timeSupplier)))); /* - * case when fewer nodeTuple than blacklist, won't result in any IllegalCapacityException + * case when fewer nodeTuple than denylist, won't result in any IllegalCapacityException */ { NodeTuple> fewerNodeTuple = new NodeTuple<>(Arrays.asList(n1, n2), null); - assertSelectLivingHosts(Arrays.asList(n1), fewerNodeTuple, blacklist, NodeSelector.ANY); - assertSelectLivingHosts(Arrays.asList(n2), fewerNodeTuple, blacklist, not1); + assertSelectLivingHosts(Arrays.asList(n1), fewerNodeTuple, denylist, NodeSelector.ANY); + assertSelectLivingHosts(Arrays.asList(n2), fewerNodeTuple, denylist, not1); } /* * selectHosts will revive a single host regardless of - * blacklist time. It'll revive the node that is closest + * denylist time. It'll revive the node that is closest * to being revived that the NodeSelector is ok with. */ - assertEquals(singletonList(n1), RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(), NodeSelector.ANY)); - assertEquals(singletonList(n2), RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(), not1)); + assertEquals(singletonList(n1), RestClient.selectNodes(nodeTuple, denylist, new AtomicInteger(), NodeSelector.ANY)); + assertEquals(singletonList(n2), RestClient.selectNodes(nodeTuple, denylist, new AtomicInteger(), not1)); /* * Try a NodeSelector that excludes all nodes. This should * return a failure, but a different failure than when the - * blacklist is empty so that the caller knows that all of - * their nodes are blacklisted AND blocked. + * denylist is empty so that the caller knows that all of + * their nodes are denylisted AND blocked. */ String message = "NodeSelector [NONE] rejected all nodes, living [] and dead [" + "[host=http://1, version=1], [host=http://2, version=2], " + "[host=http://3, version=3]]"; - assertEquals(message, assertSelectAllRejected(nodeTuple, blacklist, noNodes)); + assertEquals(message, assertSelectAllRejected(nodeTuple, denylist, noNodes)); /* * Now lets wind the clock forward, past the timeout for one of * the dead nodes. 
We should return it. */ time.set(new DeadHostState(timeSupplier).getDeadUntilNanos()); - assertSelectLivingHosts(Arrays.asList(n1), nodeTuple, blacklist, NodeSelector.ANY); + assertSelectLivingHosts(Arrays.asList(n1), nodeTuple, denylist, NodeSelector.ANY); /* * But if the NodeSelector rejects that node then we'll pick the * first on that the NodeSelector doesn't reject. */ - assertSelectLivingHosts(Arrays.asList(n2), nodeTuple, blacklist, not1); + assertSelectLivingHosts(Arrays.asList(n2), nodeTuple, denylist, not1); /* * If we wind the clock way into the future, past any of the - * blacklist timeouts then we function as though the nodes aren't - * in the blacklist at all. + * denylist timeouts then we function as though the nodes aren't + * in the denylist at all. */ time.addAndGet(DeadHostState.MAX_CONNECTION_TIMEOUT_NANOS); - assertSelectLivingHosts(Arrays.asList(n1, n2, n3), nodeTuple, blacklist, NodeSelector.ANY); - assertSelectLivingHosts(Arrays.asList(n2, n3), nodeTuple, blacklist, not1); + assertSelectLivingHosts(Arrays.asList(n1, n2, n3), nodeTuple, denylist, NodeSelector.ANY); + assertSelectLivingHosts(Arrays.asList(n2, n3), nodeTuple, denylist, not1); } } private void assertSelectLivingHosts( List expectedNodes, NodeTuple> nodeTuple, - Map blacklist, + Map denylist, NodeSelector nodeSelector ) throws IOException { int iterations = 1000; AtomicInteger lastNodeIndex = new AtomicInteger(0); - assertEquals(expectedNodes, RestClient.selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector)); + assertEquals(expectedNodes, RestClient.selectNodes(nodeTuple, denylist, lastNodeIndex, nodeSelector)); // Calling it again rotates the set of results for (int i = 1; i < iterations; i++) { Collections.rotate(expectedNodes, 1); - assertEquals("iteration " + i, expectedNodes, RestClient.selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector)); + assertEquals("iteration " + i, expectedNodes, RestClient.selectNodes(nodeTuple, denylist, lastNodeIndex, nodeSelector)); } } @@ -360,11 +360,11 @@ private void assertSelectLivingHosts( */ private static String assertSelectAllRejected( NodeTuple> nodeTuple, - Map blacklist, + Map denylist, NodeSelector nodeSelector ) { try { - RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(0), nodeSelector); + RestClient.selectNodes(nodeTuple, denylist, new AtomicInteger(0), nodeSelector); throw new AssertionError("expected selectHosts to fail"); } catch (IOException e) { return e.getMessage(); diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index f81f4ccc3b1e8..bc4be1dd153e8 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -30,8 +30,8 @@ apply plugin: 'opensearch.build' apply plugin: 'opensearch.publish' -targetCompatibility = JavaVersion.VERSION_1_8 -sourceCompatibility = JavaVersion.VERSION_1_8 +targetCompatibility = JavaVersion.VERSION_11 +sourceCompatibility = JavaVersion.VERSION_11 group = 'org.opensearch.client' archivesBaseName = 'opensearch-rest-client-sniffer' diff --git a/client/test/build.gradle b/client/test/build.gradle index 7d1333a84eae7..07d874cf01ea7 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -29,8 +29,8 @@ */ apply plugin: 'opensearch.build' -targetCompatibility = JavaVersion.VERSION_1_8 -sourceCompatibility = JavaVersion.VERSION_1_8 +targetCompatibility = JavaVersion.VERSION_11 +sourceCompatibility = JavaVersion.VERSION_11 group = "${group}.client.test" diff --git a/distribution/docker/src/docker/Dockerfile 
b/distribution/docker/src/docker/Dockerfile index c9be5c632cb59..c980217b0b8dc 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -64,7 +64,7 @@ FROM ${base_image} ENV OPENSEARCH_CONTAINER true RUN sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-Linux-* && \\ - sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-Linux-* && \\ + sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.epel.cloud|g' /etc/yum.repos.d/CentOS-Linux-* && \\ for iter in {1..10}; do \\ ${package_manager} update --setopt=tsflags=nodocs -y && \\ ${package_manager} install --setopt=tsflags=nodocs -y \\ diff --git a/distribution/tools/java-version-checker/build.gradle b/distribution/tools/java-version-checker/build.gradle index 1990c2b35c151..9480a86ce6fb7 100644 --- a/distribution/tools/java-version-checker/build.gradle +++ b/distribution/tools/java-version-checker/build.gradle @@ -11,7 +11,9 @@ apply plugin: 'opensearch.build' -targetCompatibility = JavaVersion.VERSION_1_7 +sourceCompatibility = JavaVersion.VERSION_11 +targetCompatibility = JavaVersion.VERSION_11 + // targetting very old java versions enables a warning by default on newer JDK: disable it. compileJava.options.compilerArgs += '-Xlint:-options' diff --git a/distribution/tools/keystore-cli/build.gradle b/distribution/tools/keystore-cli/build.gradle index 05dddbed501af..1e7473f787ca0 100644 --- a/distribution/tools/keystore-cli/build.gradle +++ b/distribution/tools/keystore-cli/build.gradle @@ -34,6 +34,6 @@ dependencies { compileOnly project(":server") compileOnly project(":libs:opensearch-cli") testImplementation project(":test:framework") - testImplementation 'com.google.jimfs:jimfs:1.1' - testRuntimeOnly 'com.google.guava:guava:31.0.1-jre' + testImplementation 'com.google.jimfs:jimfs:1.2' + testRuntimeOnly 'com.google.guava:guava:31.1-jre' } diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index 2f3ede7194a6d..b2e81491da6bd 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -36,10 +36,10 @@ dependencies { compileOnly project(":server") compileOnly project(":libs:opensearch-cli") api "org.bouncycastle:bcpg-fips:1.0.5.1" - api "org.bouncycastle:bc-fips:1.0.2.1" + api "org.bouncycastle:bc-fips:1.0.2.3" testImplementation project(":test:framework") testImplementation 'com.google.jimfs:jimfs:1.2' - testRuntimeOnly 'com.google.guava:guava:31.0.1-jre' + testRuntimeOnly 'com.google.guava:guava:31.1-jre' } tasks.named("dependencyLicenses").configure { diff --git a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.1.jar.sha1 b/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.1.jar.sha1 deleted file mode 100644 index 3c2bd02f432fe..0000000000000 --- a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3110169183fc532d00f0930f2b5901672515eb7c \ No newline at end of file diff --git a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.3.jar.sha1 b/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.3.jar.sha1 new file mode 100644 index 0000000000000..c71320050b7de --- /dev/null +++ b/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.3.jar.sha1 @@ -0,0 +1 @@ +da62b32cb72591f5b4d322e6ab0ce7de3247b534 \ No newline at end of file diff --git a/distribution/tools/upgrade-cli/build.gradle b/distribution/tools/upgrade-cli/build.gradle index 29d06b89395c6..0e1996f3d68fa 100644 --- 
a/distribution/tools/upgrade-cli/build.gradle +++ b/distribution/tools/upgrade-cli/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" testImplementation project(":test:framework") testImplementation 'com.google.jimfs:jimfs:1.2' - testRuntimeOnly 'com.google.guava:guava:31.0.1-jre' + testRuntimeOnly 'com.google.guava:guava:31.1-jre' } tasks.named("dependencyLicenses").configure { diff --git a/libs/core/build.gradle b/libs/core/build.gradle index edb05cd1c22b0..374f2fe572a12 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -54,13 +54,13 @@ if (!isEclipse) { } compileJava11Java { - sourceCompatibility = 11 - targetCompatibility = 11 + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 } forbiddenApisJava11 { if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) { - targetCompatibility = JavaVersion.VERSION_11.getMajorVersion() + targetCompatibility = JavaVersion.VERSION_11 } replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java b/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java index 47cf49b3e320f..f784ef9d16464 100644 --- a/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java +++ b/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java @@ -31,13 +31,13 @@ package org.opensearch.common.util.concurrent; +import org.opensearch.common.concurrent.OneWayGate; import org.opensearch.test.OpenSearchTestCase; import org.hamcrest.Matchers; import java.io.IOException; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -138,7 +138,7 @@ public void run() { private final class MyRefCounted extends AbstractRefCounted { - private final AtomicBoolean closed = new AtomicBoolean(false); + private final OneWayGate gate = new OneWayGate(); MyRefCounted() { super("test"); @@ -146,11 +146,11 @@ private final class MyRefCounted extends AbstractRefCounted { @Override protected void closeInternal() { - this.closed.set(true); + gate.close(); } public void ensureOpen() { - if (closed.get()) { + if (gate.isClosed()) { assert this.refCount() == 0; throw new IllegalStateException("closed"); } diff --git a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java index 08f1efd4dac45..f41c49844997d 100644 --- a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java +++ b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java @@ -54,7 +54,7 @@ * a thread must have {@code modifyThread} to even terminate its own pool, leaving * system threads unprotected. * - * This class throws exception on {@code exitVM} calls, and provides a whitelist where calls + * This class throws exception on {@code exitVM} calls, and provides an allowlist where calls * from exit are allowed. *
<p>
* Additionally it enforces threadgroup security with the following rules: diff --git a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java index c4d775e040dff..02c3bdfd70ec2 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java @@ -76,7 +76,7 @@ public void testCustomWordDelimiterQueryString() { .addMapping("type1", "field1", "type=text,analyzer=my_analyzer", "field2", "type=text,analyzer=my_analyzer") ); - client().prepareIndex("test", "type1", "1").setSource("field1", "foo bar baz", "field2", "not needed").get(); + client().prepareIndex("test").setId("1").setSource("field1", "foo bar baz", "field2", "not needed").get(); refresh(); SearchResponse response = client().prepareSearch("test") diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/AnalysisPainlessExtension.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/AnalysisPainlessExtension.java index c479a6d01eea5..1c13e51788f26 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/AnalysisPainlessExtension.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/AnalysisPainlessExtension.java @@ -43,13 +43,13 @@ public class AnalysisPainlessExtension implements PainlessExtension { - private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles( + private static final Whitelist ALLOWLIST = WhitelistLoader.loadFromResourceFiles( AnalysisPainlessExtension.class, "painless_whitelist.txt" ); @Override public Map, List> getContextWhitelists() { - return Collections.singletonMap(AnalysisPredicateScript.CONTEXT, Collections.singletonList(WHITELIST)); + return Collections.singletonMap(AnalysisPredicateScript.CONTEXT, Collections.singletonList(ALLOWLIST)); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java index 08e6aa4aa4c1d..faaf636d4a8ff 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -137,7 +137,7 @@ public void testNgramHighlightingWithBrokenPositions() throws IOException { .putList("analysis.analyzer.search_autocomplete.filter", "lowercase", "wordDelimiter") ) ); - client().prepareIndex("test", "test", "1").setSource("name", "ARCOTEL Hotels Deutschland").get(); + client().prepareIndex("test").setId("1").setSource("name", "ARCOTEL Hotels Deutschland").get(); refresh(); SearchResponse search = client().prepareSearch("test") .setQuery(matchQuery("name.autocomplete", "deut tel").operator(Operator.OR)) @@ -173,7 +173,8 @@ public void testMultiPhraseCutoff() throws IOException { ); ensureGreen(); - client().prepareIndex("test", "test", "1") + client().prepareIndex("test") + .setId("1") .setSource( "body", "Test: http://www.facebook.com http://elasticsearch.org " @@ -235,7 +236,7 @@ public void testSynonyms() throws IOException { ); ensureGreen(); - client().prepareIndex("test", "type1", 
"0").setSource("field1", "The quick brown fox jumps over the lazy dog").get(); + client().prepareIndex("test").setId("0").setSource("field1", "The quick brown fox jumps over the lazy dog").get(); refresh(); for (String highlighterType : new String[] { "plain", "fvh", "unified" }) { logger.info("--> highlighting (type=" + highlighterType + ") and searching on field1"); @@ -263,10 +264,12 @@ public void testPhrasePrefix() throws IOException { ensureGreen(); - client().prepareIndex("first_test_index", "type1", "0") + client().prepareIndex("first_test_index") + .setId("0") .setSource("field0", "The quick brown fox jumps over the lazy dog", "field1", "The quick brown fox jumps over the lazy dog") .get(); - client().prepareIndex("first_test_index", "type1", "1") + client().prepareIndex("first_test_index") + .setId("1") .setSource("field1", "The quick browse button is a fancy thing, right bro?") .get(); refresh(); @@ -344,7 +347,8 @@ public void testPhrasePrefix() throws IOException { ) ); // with synonyms - client().prepareIndex("second_test_index", "doc", "0") + client().prepareIndex("second_test_index") + .setId("0") .setSource( "type", "type2", @@ -354,10 +358,11 @@ public void testPhrasePrefix() throws IOException { "The quick brown fox jumps over the lazy dog" ) .get(); - client().prepareIndex("second_test_index", "doc", "1") + client().prepareIndex("second_test_index") + .setId("1") .setSource("type", "type2", "field4", "The quick browse button is a fancy thing, right bro?") .get(); - client().prepareIndex("second_test_index", "doc", "2").setSource("type", "type2", "field4", "a quick fast blue car").get(); + client().prepareIndex("second_test_index").setId("2").setSource("type", "type2", "field4", "a quick fast blue car").get(); refresh(); source = searchSource().postFilter(termQuery("type", "type2")) diff --git a/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java b/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java index 6efd7cbcd9c41..aeaa7246f33b8 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java @@ -114,7 +114,7 @@ public void testFailureInConditionalProcessor() { Exception e = expectThrows( Exception.class, - () -> client().prepareIndex("index", "doc") + () -> client().prepareIndex("index") .setId("1") .setSource("x", 0) .setPipeline(pipelineId) @@ -178,7 +178,8 @@ public Settings onNodeStopped(String nodeName) { checkPipelineExists.accept(pipelineIdWithoutScript); checkPipelineExists.accept(pipelineIdWithScript); - client().prepareIndex("index", "doc", "1") + client().prepareIndex("index") + .setId("1") .setSource("x", 0) .setPipeline(pipelineIdWithoutScript) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -186,7 +187,8 @@ public Settings onNodeStopped(String nodeName) { IllegalStateException exception = expectThrows( IllegalStateException.class, - () -> client().prepareIndex("index", "doc", "2") + () -> client().prepareIndex("index") + .setId("2") .setSource("x", 0) .setPipeline(pipelineIdWithScript) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -236,7 +238,8 @@ public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exceptio ); client().admin().cluster().preparePutPipeline("_id", pipeline, XContentType.JSON).get(); - client().prepareIndex("index", "doc", "1") + 
client().prepareIndex("index") + .setId("1") .setSource("x", 0) .setPipeline("_id") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -254,7 +257,8 @@ public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exceptio internalCluster().fullRestart(); ensureYellow("index"); - client().prepareIndex("index", "doc", "2") + client().prepareIndex("index") + .setId("2") .setSource("x", 0) .setPipeline("_id") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -275,7 +279,8 @@ public void testWithDedicatedIngestNode() throws Exception { ); client().admin().cluster().preparePutPipeline("_id", pipeline, XContentType.JSON).get(); - client().prepareIndex("index", "doc", "1") + client().prepareIndex("index") + .setId("1") .setSource("x", 0) .setPipeline("_id") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -288,7 +293,8 @@ public void testWithDedicatedIngestNode() throws Exception { logger.info("Stopping"); internalCluster().restartNode(node, new InternalTestCluster.RestartCallback()); - client(ingestNode).prepareIndex("index", "doc", "2") + client(ingestNode).prepareIndex("index") + .setId("2") .setSource("x", 0) .setPipeline("_id") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java index 93cb60c5b5296..c45104873c7fd 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java @@ -44,13 +44,13 @@ public class ProcessorsWhitelistExtension implements PainlessExtension { - private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles( + private static final Whitelist ALLOWLIST = WhitelistLoader.loadFromResourceFiles( ProcessorsWhitelistExtension.class, "processors_whitelist.txt" ); @Override public Map, List> getContextWhitelists() { - return Collections.singletonMap(IngestScript.CONTEXT, Collections.singletonList(WHITELIST)); + return Collections.singletonMap(IngestScript.CONTEXT, Collections.singletonList(ALLOWLIST)); } } diff --git a/modules/ingest-common/src/main/resources/org/opensearch/ingest/common/processors_whitelist.txt b/modules/ingest-common/src/main/resources/org/opensearch/ingest/common/processors_whitelist.txt index 1372ef2ed03be..7b8c60507887b 100644 --- a/modules/ingest-common/src/main/resources/org/opensearch/ingest/common/processors_whitelist.txt +++ b/modules/ingest-common/src/main/resources/org/opensearch/ingest/common/processors_whitelist.txt @@ -17,7 +17,7 @@ # under the License. 
# -# This file contains a whitelist of static processor methods that can be accessed from painless +# This file contains a allowlist of static processor methods that can be accessed from painless class org.opensearch.ingest.common.Processors { long bytes(String) diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle index dabbfde754f92..e3feacd71f060 100644 --- a/modules/lang-expression/build.gradle +++ b/modules/lang-expression/build.gradle @@ -39,8 +39,8 @@ dependencies { api "org.apache.lucene:lucene-expressions:${versions.lucene}" api 'org.antlr:antlr4-runtime:4.9.3' api 'org.ow2.asm:asm:9.2' - api 'org.ow2.asm:asm-commons:5.0.4' - api 'org.ow2.asm:asm-tree:5.0.4' + api 'org.ow2.asm:asm-commons:9.2' + api 'org.ow2.asm:asm-tree:9.2' } restResources { restApi { diff --git a/modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 b/modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 deleted file mode 100644 index 94fe0cd92c9c9..0000000000000 --- a/modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5a556786086c23cd689a0328f8519db93821c04c diff --git a/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 b/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 new file mode 100644 index 0000000000000..7beb3d29afe86 --- /dev/null +++ b/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 @@ -0,0 +1 @@ +f4d7f0fc9054386f2893b602454d48e07d4fbead \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 b/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 deleted file mode 100644 index 5822a485a61ff..0000000000000 --- a/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -396ce0c07ba2b481f25a70195c7c94922f0d1b0b \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-tree-9.2.jar.sha1 b/modules/lang-expression/licenses/asm-tree-9.2.jar.sha1 new file mode 100644 index 0000000000000..7b486521ecef3 --- /dev/null +++ b/modules/lang-expression/licenses/asm-tree-9.2.jar.sha1 @@ -0,0 +1 @@ +d96c99a30f5e1a19b0e609dbb19a44d8518ac01e \ No newline at end of file diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java index 453787fe32972..259234d79ab42 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java @@ -100,7 +100,7 @@ private SearchRequestBuilder buildRequest(String script, Object... 
params) { public void testBasic() throws Exception { createIndex("test"); ensureGreen("test"); - client().prepareIndex("test", "doc", "1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); SearchResponse rsp = buildRequest("doc['foo'] + 1").get(); assertEquals(1, rsp.getHits().getTotalHits().value); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); @@ -109,7 +109,7 @@ public void testBasic() throws Exception { public void testFunction() throws Exception { createIndex("test"); ensureGreen("test"); - client().prepareIndex("test", "doc", "1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); SearchResponse rsp = buildRequest("doc['foo'] + abs(1)").get(); assertSearchResponse(rsp); assertEquals(1, rsp.getHits().getTotalHits().value); @@ -119,7 +119,7 @@ public void testFunction() throws Exception { public void testBasicUsingDotValue() throws Exception { createIndex("test"); ensureGreen("test"); - client().prepareIndex("test", "doc", "1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); SearchResponse rsp = buildRequest("doc['foo'].value + 1").get(); assertEquals(1, rsp.getHits().getTotalHits().value); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); @@ -130,9 +130,9 @@ public void testScore() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"), - client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"), - client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye") + client().prepareIndex("test").setId("1").setSource("text", "hello goodbye"), + client().prepareIndex("test").setId("2").setSource("text", "hello hello hello goodbye"), + client().prepareIndex("test").setId("3").setSource("text", "hello hello goodebye") ); ScriptScoreFunctionBuilder score = ScoreFunctionBuilders.scriptFunction( new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap()) @@ -162,8 +162,8 @@ public void testDateMethods() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") + client().prepareIndex("test").setId("1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"), + client().prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); SearchResponse rsp = buildRequest("doc['date0'].getSeconds() - doc['date0'].getMinutes()").get(); assertEquals(2, rsp.getHits().getTotalHits().value); @@ -192,8 +192,8 @@ public void testDateObjectMethods() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") + client().prepareIndex("test").setId("1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", 
"1985-09-01T23:11:01Z"), + client().prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); SearchResponse rsp = buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour").get(); assertEquals(2, rsp.getHits().getTotalHits().value); @@ -241,9 +241,9 @@ public void testMultiValueMethods() throws Exception { indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource(doc1), - client().prepareIndex("test", "doc", "2").setSource(doc2), - client().prepareIndex("test", "doc", "3").setSource(doc3) + client().prepareIndex("test").setId("1").setSource(doc1), + client().prepareIndex("test").setId("2").setSource(doc2), + client().prepareIndex("test").setId("3").setSource(doc3) ); SearchResponse rsp = buildRequest("doc['double0'].count() + doc['double1'].count()").get(); @@ -324,7 +324,7 @@ public void testMultiValueMethods() throws Exception { public void testInvalidDateMethodCall() throws Exception { OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double", "type=double")); ensureGreen("test"); - indexRandom(true, client().prepareIndex("test", "doc", "1").setSource("double", "178000000.0")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("double", "178000000.0")); try { buildRequest("doc['double'].getYear()").get(); fail(); @@ -347,8 +347,8 @@ public void testSparseField() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "x", 4), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "y", 2) + client().prepareIndex("test").setId("1").setSource("id", 1, "x", 4), + client().prepareIndex("test").setId("2").setSource("id", 2, "y", 2) ); SearchResponse rsp = buildRequest("doc['x'] + 1").get(); OpenSearchAssertions.assertSearchResponse(rsp); @@ -361,7 +361,7 @@ public void testSparseField() throws Exception { public void testMissingField() throws Exception { createIndex("test"); ensureGreen("test"); - client().prepareIndex("test", "doc", "1").setSource("x", 4).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("x", 4).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("doc['bogus']").get(); fail("Expected missing field to cause failure"); @@ -380,9 +380,9 @@ public void testParams() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "x", 10), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "x", 3), - client().prepareIndex("test", "doc", "3").setSource("id", 3, "x", 5) + client().prepareIndex("test").setId("1").setSource("id", 1, "x", 10), + client().prepareIndex("test").setId("2").setSource("id", 2, "x", 3), + client().prepareIndex("test").setId("3").setSource("id", 3, "x", 5) ); // a = int, b = double, c = long String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 
1 : 0)"; @@ -395,7 +395,7 @@ public void testParams() throws Exception { } public void testCompileFailure() { - client().prepareIndex("test", "doc", "1").setSource("x", 1).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("x", 1).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("garbage%@#%@").get(); fail("Expected expression compilation failure"); @@ -406,7 +406,7 @@ public void testCompileFailure() { } public void testNonNumericParam() { - client().prepareIndex("test", "doc", "1").setSource("x", 1).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("x", 1).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("a", "a", "astring").get(); fail("Expected string parameter to cause failure"); @@ -421,7 +421,7 @@ public void testNonNumericParam() { } public void testNonNumericField() { - client().prepareIndex("test", "doc", "1").setSource("text", "this is not a number").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("text", "this is not a number").setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("doc['text.keyword']").get(); fail("Expected text field to cause execution failure"); @@ -436,7 +436,7 @@ public void testNonNumericField() { } public void testInvalidGlobalVariable() { - client().prepareIndex("test", "doc", "1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("bogus").get(); fail("Expected bogus variable to cause execution failure"); @@ -451,7 +451,7 @@ public void testInvalidGlobalVariable() { } public void testDocWithoutField() { - client().prepareIndex("test", "doc", "1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("doc").get(); fail("Expected doc variable without field to cause execution failure"); @@ -466,7 +466,7 @@ public void testDocWithoutField() { } public void testInvalidFieldMember() { - client().prepareIndex("test", "doc", "1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("doc['foo'].bogus").get(); fail("Expected bogus field member to cause execution failure"); @@ -486,9 +486,9 @@ public void testSpecialValueVariable() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("x", 5, "y", 1.2), - client().prepareIndex("test", "doc", "2").setSource("x", 10, "y", 1.4), - client().prepareIndex("test", "doc", "3").setSource("x", 13, "y", 1.8) + client().prepareIndex("test").setId("1").setSource("x", 5, "y", 1.2), + client().prepareIndex("test").setId("2").setSource("x", 10, "y", 1.4), + client().prepareIndex("test").setId("3").setSource("x", 13, "y", 1.8) ); SearchRequestBuilder req = client().prepareSearch().setIndices("test"); @@ -532,9 +532,9 @@ public void testStringSpecialValueVariable() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("text", "hello"), - client().prepareIndex("test", "doc", "2").setSource("text", "goodbye"), - client().prepareIndex("test", "doc", "3").setSource("text", "hello") + client().prepareIndex("test").setId("1").setSource("text", "hello"), + client().prepareIndex("test").setId("2").setSource("text", "goodbye"), + 
client().prepareIndex("test").setId("3").setSource("text", "hello") ); SearchRequestBuilder req = client().prepareSearch().setIndices("test"); @@ -564,9 +564,8 @@ public void testInvalidUpdateScript() throws Exception { try { createIndex("test_index"); ensureGreen("test_index"); - indexRandom(true, client().prepareIndex("test_index", "doc", "1").setSource("text_field", "text")); + indexRandom(true, client().prepareIndex("test_index").setId("1").setSource("text_field", "text")); UpdateRequestBuilder urb = client().prepareUpdate().setIndex("test_index"); - urb.setType("doc"); urb.setId("1"); urb.setScript(new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "0", Collections.emptyMap())); urb.get(); @@ -585,11 +584,11 @@ public void testPipelineAggregationScript() throws Exception { ensureGreen("agg_index"); indexRandom( true, - client().prepareIndex("agg_index", "doc", "1").setSource("one", 1.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "2").setSource("one", 2.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "3").setSource("one", 3.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "4").setSource("one", 4.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "5").setSource("one", 5.0, "two", 2.0, "three", 3.0, "four", 4.0) + client().prepareIndex("agg_index").setId("1").setSource("one", 1.0, "two", 2.0, "three", 3.0, "four", 4.0), + client().prepareIndex("agg_index").setId("2").setSource("one", 2.0, "two", 2.0, "three", 3.0, "four", 4.0), + client().prepareIndex("agg_index").setId("3").setSource("one", 3.0, "two", 2.0, "three", 3.0, "four", 4.0), + client().prepareIndex("agg_index").setId("4").setSource("one", 4.0, "two", 2.0, "three", 3.0, "four", 4.0), + client().prepareIndex("agg_index").setId("5").setSource("one", 5.0, "two", 2.0, "three", 3.0, "four", 4.0) ); SearchResponse response = client().prepareSearch("agg_index") .addAggregation( @@ -649,7 +648,8 @@ public void testGeo() throws Exception { xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "test") @@ -696,9 +696,9 @@ public void testBoolean() throws Exception { ensureGreen(); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "price", 1.0, "vip", true), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "price", 2.0, "vip", false), - client().prepareIndex("test", "doc", "3").setSource("id", 3, "price", 2.0, "vip", false) + client().prepareIndex("test").setId("1").setSource("id", 1, "price", 1.0, "vip", true), + client().prepareIndex("test").setId("2").setSource("id", 2, "price", 2.0, "vip", false), + client().prepareIndex("test").setId("3").setSource("id", 3, "price", 2.0, "vip", false) ); // access .value SearchResponse rsp = buildRequest("doc['vip'].value").get(); @@ -729,8 +729,8 @@ public void testFilterScript() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "foo", 1.0), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "foo", 0.0) + client().prepareIndex("test").setId("1").setSource("id", 1, "foo", 1.0), + client().prepareIndex("test").setId("2").setSource("id", 2, "foo", 0.0) ); SearchRequestBuilder 
builder = buildRequest("doc['foo'].value"); Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap()); diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java index 05064f66fef80..5aade265439d2 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java @@ -69,11 +69,9 @@ public void testAllOpsDisabledIndexedScripts() throws IOException { .setId("script1") .setContent(new BytesArray("{\"script\": {\"lang\": \"expression\", \"source\": \"2\"} }"), XContentType.JSON) .get(); - client().prepareIndex("test", "scriptTest", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"theField\":\"foo\"}", XContentType.JSON).get(); try { - client().prepareUpdate("test", "scriptTest", "1") - .setScript(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())) - .get(); + client().prepareUpdate("test", "1").setScript(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())).get(); fail("update script should have been rejected"); } catch (Exception e) { assertThat(e.getMessage(), containsString("failed to execute script")); diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java index 5dad1b17cbf4c..617f1f4f738a0 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java @@ -66,7 +66,8 @@ public void testBasic() throws Exception { final int numDocs = randomIntBetween(10, 100); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - indexRequestBuilders[i] = client().prepareIndex("msearch", "test", String.valueOf(i)) + indexRequestBuilders[i] = client().prepareIndex("msearch") + .setId(String.valueOf(i)) .setSource("odd", (i % 2 == 0), "group", (i % 3)); } indexRandom(true, indexRequestBuilders); diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java index 7622eb55b7b49..61f047a32f1c1 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java @@ -68,8 +68,8 @@ protected Collection> getPlugins() { @Before public void setup() throws IOException { createIndex("test"); - client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().field("text", "value1").endObject()).get(); - client().prepareIndex("test", "type", "2").setSource(jsonBuilder().startObject().field("text", "value2").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "value1").endObject()).get(); + 
client().prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("text", "value2").endObject()).get(); client().admin().indices().prepareRefresh().get(); } @@ -185,11 +185,11 @@ public void testIndexedTemplateClient() throws Exception { assertNotNull(getResponse.getSource()); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "2").setSource("{\"theField\":\"foo 2\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("2").setSource("{\"theField\":\"foo 2\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); bulkRequestBuilder.get(); client().admin().indices().prepareRefresh().get(); @@ -229,11 +229,11 @@ public void testIndexedTemplate() throws Exception { assertAcked(client().admin().cluster().preparePutStoredScript().setId("3").setContent(new BytesArray(script), XContentType.JSON)); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "2").setSource("{\"theField\":\"foo 2\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("2").setSource("{\"theField\":\"foo 2\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); bulkRequestBuilder.get(); client().admin().indices().prepareRefresh().get(); @@ -270,9 +270,7 @@ public void testIndexedTemplateOverwrite() throws Exception { createIndex("testindex"); ensureGreen("testindex"); - client().prepareIndex("testindex", "test", "1") - .setSource(jsonBuilder().startObject().field("searchtext", "dev1").endObject()) 
- .get(); + client().prepareIndex("testindex").setId("1").setSource(jsonBuilder().startObject().field("searchtext", "dev1").endObject()).get(); client().admin().indices().prepareRefresh().get(); int iterations = randomIntBetween(2, 11); @@ -354,11 +352,11 @@ public void testIndexedTemplateWithArray() throws Exception { client().admin().cluster().preparePutStoredScript().setId("4").setContent(new BytesArray(multiQuery), XContentType.JSON) ); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "2").setSource("{\"theField\":\"foo 2\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("2").setSource("{\"theField\":\"foo 2\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); bulkRequestBuilder.get(); client().admin().indices().prepareRefresh().get(); diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java index b66d275686981..68ba824955468 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java @@ -68,10 +68,7 @@ public List routes() { new Route(GET, "/_search/template"), new Route(POST, "/_search/template"), new Route(GET, "/{index}/_search/template"), - new Route(POST, "/{index}/_search/template"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_search/template"), - new Route(POST, "/{index}/{type}/_search/template") + new Route(POST, "/{index}/_search/template") ) ); } diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Whitelist.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Whitelist.java index 695c8663872b0..b400c7a027fca 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Whitelist.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Whitelist.java @@ -39,18 +39,18 @@ import java.util.Objects; /** - * Whitelist contains data structures designed to be used to generate a whitelist of Java classes, + * Allowlist contains data structures designed to be used to generate an allowlist of Java classes, * constructors, methods, and fields that can be used within a Painless script at both compile-time * and run-time. 
* - * A whitelist consists of several pieces with {@link WhitelistClass}s as the top level. Each + * An Allowlist consists of several pieces with {@link WhitelistClass}s as the top level. Each * {@link WhitelistClass} will contain zero-to-many {@link WhitelistConstructor}s, {@link WhitelistMethod}s, and * {@link WhitelistField}s which are what will be available with a Painless script. See each individual - * whitelist object for more detail. + * allowlist object for more detail. */ public final class Whitelist { - private static final String[] BASE_WHITELIST_FILES = new String[] { + private static final String[] BASE_ALLOWLIST_FILES = new String[] { "org.opensearch.txt", "java.lang.txt", "java.math.txt", @@ -66,37 +66,37 @@ public final class Whitelist { "java.util.stream.txt" }; public static final List BASE_WHITELISTS = Collections.singletonList( - WhitelistLoader.loadFromResourceFiles(Whitelist.class, WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS, BASE_WHITELIST_FILES) + WhitelistLoader.loadFromResourceFiles(Whitelist.class, WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS, BASE_ALLOWLIST_FILES) ); - /** The {@link ClassLoader} used to look up the whitelisted Java classes, constructors, methods, and fields. */ + /** The {@link ClassLoader} used to look up the allowlisted Java classes, constructors, methods, and fields. */ public final ClassLoader classLoader; - /** The {@link List} of all the whitelisted Painless classes. */ + /** The {@link List} of all the allowlisted Painless classes. */ public final List whitelistClasses; - /** The {@link List} of all the whitelisted static Painless methods. */ + /** The {@link List} of all the allowlisted static Painless methods. */ public final List whitelistImportedMethods; - /** The {@link List} of all the whitelisted Painless class bindings. */ + /** The {@link List} of all the allowlisted Painless class bindings. */ public final List whitelistClassBindings; - /** The {@link List} of all the whitelisted Painless instance bindings. */ + /** The {@link List} of all the allowlisted Painless instance bindings. */ public final List whitelistInstanceBindings; /** Standard constructor. All values must be not {@code null}.
*/ public Whitelist( ClassLoader classLoader, - List whitelistClasses, - List whitelistImportedMethods, - List whitelistClassBindings, - List whitelistInstanceBindings + List allowlistClasses, + List allowlistImportedMethods, + List allowlistClassBindings, + List allowlistInstanceBindings ) { this.classLoader = Objects.requireNonNull(classLoader); - this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(whitelistClasses)); - this.whitelistImportedMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistImportedMethods)); - this.whitelistClassBindings = Collections.unmodifiableList(Objects.requireNonNull(whitelistClassBindings)); - this.whitelistInstanceBindings = Collections.unmodifiableList(Objects.requireNonNull(whitelistInstanceBindings)); + this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(allowlistClasses)); + this.whitelistImportedMethods = Collections.unmodifiableList(Objects.requireNonNull(allowlistImportedMethods)); + this.whitelistClassBindings = Collections.unmodifiableList(Objects.requireNonNull(allowlistClassBindings)); + this.whitelistInstanceBindings = Collections.unmodifiableList(Objects.requireNonNull(allowlistInstanceBindings)); } } diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java index 3947be6005448..bf5083998f94b 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java @@ -42,7 +42,7 @@ /** * Class represents the equivalent of a Java class in Painless complete with super classes, * constructors, methods, and fields. There must be a one-to-one mapping of class names to Java - * classes. Though, since multiple whitelists may be combined into a single whitelist for a + * classes. Though, since multiple allowlists may be combined into a single allowlist for a * specific context, as long as multiple classes representing the same Java class have the same * class name and have legal constructor/method overloading they can be merged together. * @@ -51,7 +51,7 @@ * number of parameters, and multiples methods with the same name are allowed for a single class * as long as they have the same return type and a different number of parameters. * - * Classes will automatically extend other whitelisted classes if the Java class they represent is a + * Classes will automatically extend other allowlisted classes if the Java class they represent is a * subclass of other classes including Java interfaces. */ public final class WhitelistClass { @@ -62,13 +62,13 @@ public final class WhitelistClass { /** The Java class name this class represents. */ public final String javaClassName; - /** The {@link List} of whitelisted ({@link WhitelistConstructor}s) available to this class. */ + /** The {@link List} of allowlisted ({@link WhitelistConstructor}s) available to this class. */ public final List whitelistConstructors; - /** The {@link List} of whitelisted ({@link WhitelistMethod}s) available to this class. */ + /** The {@link List} of allowlisted ({@link WhitelistMethod}s) available to this class. */ public final List whitelistMethods; - /** The {@link List} of whitelisted ({@link WhitelistField}s) available to this class. */ + /** The {@link List} of allowlisted ({@link WhitelistField}s) available to this class. 
*/ public final List whitelistFields; /** The {@link Map} of annotations for this class. */ @@ -78,18 +78,18 @@ public final class WhitelistClass { public WhitelistClass( String origin, String javaClassName, - List whitelistConstructors, - List whitelistMethods, - List whitelistFields, + List allowlistConstructors, + List allowlistMethods, + List allowlistFields, List painlessAnnotations ) { this.origin = Objects.requireNonNull(origin); this.javaClassName = Objects.requireNonNull(javaClassName); - this.whitelistConstructors = Collections.unmodifiableList(Objects.requireNonNull(whitelistConstructors)); - this.whitelistMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistMethods)); - this.whitelistFields = Collections.unmodifiableList(Objects.requireNonNull(whitelistFields)); + this.whitelistConstructors = Collections.unmodifiableList(Objects.requireNonNull(allowlistConstructors)); + this.whitelistMethods = Collections.unmodifiableList(Objects.requireNonNull(allowlistMethods)); + this.whitelistFields = Collections.unmodifiableList(Objects.requireNonNull(allowlistFields)); if (painlessAnnotations.isEmpty()) { this.painlessAnnotations = Collections.emptyMap(); diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java index 387453f1ea880..15ce4b84c4b09 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java @@ -50,7 +50,7 @@ */ public class WhitelistClassBinding { - /** Information about where this constructor was whitelisted from. */ + /** Information about where this constructor was allowlisted from. */ public final String origin; /** The Java class name this class binding targets. */ diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java index 4b96d727e4ed6..301829968255c 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java @@ -40,14 +40,14 @@ import java.util.stream.Collectors; /** - * Constructor represents the equivalent of a Java constructor available as a whitelisted class + * Constructor represents the equivalent of a Java constructor available as an allowlisted class * constructor within Painless. Constructors for Painless classes may be accessed exactly as * constructors for Java classes are using the 'new' keyword. Painless classes may have multiple * constructors as long as they comply with arity overloading described for {@link WhitelistClass}. */ public final class WhitelistConstructor { - /** Information about where this constructor was whitelisted from. */ + /** Information about where this constructor was allowlisted from.
*/ public final String origin; /** diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java index e6519ea68af29..cc3f6290a6c17 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java @@ -40,13 +40,13 @@ import java.util.stream.Collectors; /** - * Field represents the equivalent of a Java field available as a whitelisted class field + * Field represents the equivalent of a Java field available as an allowlisted class field * within Painless. Fields for Painless classes may be accessed exactly as fields for Java classes * are using the '.' operator on an existing class variable/field. */ public class WhitelistField { - /** Information about where this method was whitelisted from. */ + /** Information about where this method was allowlisted from. */ public final String origin; /** The field name used to look up the field reflection object. */ diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java index 6e5994622ecdf..7e9bf0af94bd1 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java @@ -46,7 +46,7 @@ */ public class WhitelistInstanceBinding { - /** Information about where this constructor was whitelisted from. */ + /** Information about where this constructor was allowlisted from. */ public final String origin; /** The Java instance this instance binding targets. */ diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java index 6f123198ab4be..2da6d8fce1d8e 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java @@ -54,7 +54,7 @@ public final class WhitelistLoader { /** * Loads and creates a {@link Whitelist} from one to many text files using only the base annotation parsers. - * See {@link #loadFromResourceFiles(Class, Map, String...)} for information on how to structure a whitelist + * See {@link #loadFromResourceFiles(Class, Map, String...)} for information on how to structure an allowlist * text file. */ public static Whitelist loadFromResourceFiles(Class resource, String... filepaths) { @@ -66,17 +66,17 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep * {@link String}s with a single {@link Class} to be be used to load the resources where each {@link String} * is the path of a single text file. The {@link Class}'s {@link ClassLoader} will be used to lookup the Java * reflection objects for each individual {@link Class}, {@link Constructor}, {@link Method}, and {@link Field} - * specified as part of the whitelist in the text file. + * specified as part of the allowlist in the text file. * * A single pass is made through each file to collect all the information about each class, constructor, method, - * and field. 
Most validation will be done at a later point after all whitelists have been gathered and their + * and field. Most validation will be done at a later point after all allowlists have been gathered and their * merging takes place. * * A painless type name is one of the following: *
    *
  • def - The Painless dynamic type which is automatically included without a need to be - * whitelisted.
  • - *
  • fully-qualified Java type name - Any whitelisted Java class will have the equivalent name as + * allowlisted.
  • + *
  • fully-qualified Java type name - Any allowlisted Java class will have the equivalent name as * a Painless type name with the exception that any dollar symbols used as part of inner classes will * be replaced with dot symbols.
  • *
  • short Java type name - The text after the final dot symbol of any specified Java class. A @@ -84,7 +84,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep * as described later.
  • *
* - * The following can be parsed from each whitelist text file: + * The following can be parsed from each allowlist text file: *
    *
  • Blank lines will be ignored by the parser.
  • *
  • Comments may be created starting with a pound '#' symbol and end with a newline. These will @@ -98,19 +98,19 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep *
      *
    • A constructor may be specified starting with an opening parenthesis, followed by a * comma-delimited list of Painless type names corresponding to the type/class names for - * the equivalent Java parameter types (these must be whitelisted as well), a closing + * the equivalent Java parameter types (these must be allowlisted as well), a closing * parenthesis, and a newline.
    • *
    • A method may be specified starting with a Painless type name for the return type, * followed by the Java name of the method (which will also be the Painless name for the * method), an opening parenthesis, a comma-delimited list of Painless type names * corresponding to the type/class names for the equivalent Java parameter types - * (these must be whitelisted as well), a closing parenthesis, and a newline.
    • + * (these must be allowlisted as well), a closing parenthesis, and a newline. *
    • An augmented method may be specified starting with a Painless type name for the return * type, followed by the fully qualified Java name of the class the augmented method is - * part of (this class does not need to be whitelisted), the Java name of the method + * part of (this class does not need to be allowlisted), the Java name of the method * (which will also be the Painless name for the method), an opening parenthesis, a * comma-delimited list of Painless type names corresponding to the type/class names - * for the equivalent Java parameter types (these must be whitelisted as well), a closing + * for the equivalent Java parameter types (these must be allowlisted as well), a closing * parenthesis, and a newline.
    • *
    • A field may be specified starting with a Painless type name for the equivalent Java type * of the field, followed by the Java name of the field (which all be the Painless name @@ -130,7 +130,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep * fully-qualified Java class name. Method argument types, method return types, and field types * must be specified with Painless type names (def, fully-qualified, or short) as described earlier. * - * The following example is used to create a single whitelist text file: + * The following example is used to create a single allowlist text file: * * {@code * # primitive types @@ -164,12 +164,12 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep * } */ public static Whitelist loadFromResourceFiles(Class resource, Map parsers, String... filepaths) { - List whitelistClasses = new ArrayList<>(); - List whitelistStatics = new ArrayList<>(); - List whitelistClassBindings = new ArrayList<>(); + List allowlistClasses = new ArrayList<>(); + List allowlistStatics = new ArrayList<>(); + List allowlistClassBindings = new ArrayList<>(); - // Execute a single pass through the whitelist text files. This will gather all the - // constructors, methods, augmented methods, and fields for each whitelisted class. + // Execute a single pass through the allowlist text files. This will gather all the + // constructors, methods, augmented methods, and fields for each allowlisted class. for (String filepath : filepaths) { String line; int number = -1; @@ -181,11 +181,11 @@ public static Whitelist loadFromResourceFiles(Class resource, Map whitelistConstructors = null; - List whitelistMethods = null; - List whitelistFields = null; + List allowlistConstructors = null; + List allowlistMethods = null; + List allowlistFields = null; List classAnnotations = null; while ((line = reader.readLine()) != null) { @@ -197,7 +197,7 @@ public static Whitelist loadFromResourceFiles(Class resource, Map resource, Map(); - whitelistMethods = new ArrayList<>(); - whitelistFields = new ArrayList<>(); + allowlistConstructors = new ArrayList<>(); + allowlistMethods = new ArrayList<>(); + allowlistFields = new ArrayList<>(); } else if (line.startsWith("static_import ")) { // Ensure the final token of the line is '{'. 
if (line.endsWith("{") == false) { @@ -250,25 +250,25 @@ public static Whitelist loadFromResourceFiles(Class resource, Map resource, Map resource, Map resource, Map resource, Map resource, Map resource, Map resource, Map resource, Map) resource::getClassLoader); - return new Whitelist(loader, whitelistClasses, whitelistStatics, whitelistClassBindings, Collections.emptyList()); + return new Whitelist(loader, allowlistClasses, allowlistStatics, allowlistClassBindings, Collections.emptyList()); } - private static List parseWhitelistAnnotations(Map parsers, String line) { + private static List parseAllowlistAnnotations(Map parsers, String line) { List annotations; diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java index 2a8e94206e276..9a57a5a098c19 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java @@ -40,7 +40,7 @@ import java.util.stream.Collectors; /** - * Method represents the equivalent of a Java method available as a whitelisted class method + * Method represents the equivalent of a Java method available as an allowlisted class method * within Painless. Methods for Painless classes may be accessed exactly as methods for Java classes * are using the '.' operator on an existing class variable/field. Painless classes may have multiple * methods with the same name as long as they comply with arity overloading described in @@ -50,11 +50,11 @@ * these are known as augmented methods. An augmented method can be added to a class as a part of any * Java class as long as the method is static and the first parameter of the method is the Java class * represented by the class. Note that the augmented method's parent Java class does not need to be - * whitelisted. + * allowlisted. */ public class WhitelistMethod { - /** Information about where this method was whitelisted from. */ + /** Information about where this method was allowlisted from. */ public final String origin; /** diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java index ec270a3363281..9874c61b2a080 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java @@ -39,8 +39,8 @@ import java.util.stream.Stream; /** - * WhitelistAnnotationParser is an interface used to define how to - * parse an annotation against any whitelist object while loading. + * AllowlistAnnotationParser is an interface used to define how to + * parse an annotation against any allowlist object while loading. 
*/ public interface WhitelistAnnotationParser { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java index 5d302e2698f1b..eca931d87b68c 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java @@ -168,7 +168,7 @@ public Loader createLoader(ClassLoader parent) { private final Class scriptClass; /** - * The whitelist the script will use. + * The allowlist the script will use. */ private final PainlessLookup painlessLookup; @@ -182,7 +182,7 @@ public Loader createLoader(ClassLoader parent) { * @param scriptClass The class/interface the script will implement. * @param factoryClass An optional class/interface to create the {@code scriptClass} instance. * @param statefulFactoryClass An optional class/interface to create the {@code factoryClass} instance. - * @param painlessLookup The whitelist the script will use. + * @param painlessLookup The allowlist the script will use. */ Compiler(Class scriptClass, Class factoryClass, Class statefulFactoryClass, PainlessLookup painlessLookup) { this.scriptClass = scriptClass; diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java b/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java index 1c5b40b0e7166..de6fd5ebc0177 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java @@ -237,10 +237,10 @@ static MethodHandle arrayLengthGetter(Class arrayType) { *

      *

      * This method traverses {@code recieverClass}'s class hierarchy (including interfaces) - * until it finds a matching whitelisted method. If one is not found, it throws an exception. + * until it finds a matching allowlisted method. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching method. *

- * @param painlessLookup the whitelist + * @param painlessLookup the allowlist * @param functions user defined functions and lambdas * @param constants available constants to be used if the method has the {@code InjectConstantAnnotation} * @param methodHandlesLookup caller's lookup @@ -249,7 +249,7 @@ static MethodHandle arrayLengthGetter(Class arrayType) { * @param name Name of the method. * @param args bootstrap args passed to callsite * @return pointer to matching method to invoke. never returns null. - * @throws IllegalArgumentException if no matching whitelisted method was found. + * @throws IllegalArgumentException if no matching allowlisted method was found. * @throws Throwable if a method reference cannot be converted to a functional interface */ static MethodHandle lookupMethod( @@ -473,26 +473,26 @@ private static MethodHandle lookupReferenceInternal( *

      * The following field loads are allowed: *

        - *
      • Whitelisted {@code field} from receiver's class or any superclasses. - *
      • Whitelisted method named {@code getField()} from receiver's class/superclasses/interfaces. - *
      • Whitelisted method named {@code isField()} from receiver's class/superclasses/interfaces. + *
      • Allowlisted {@code field} from receiver's class or any superclasses. + *
      • Allowlisted method named {@code getField()} from receiver's class/superclasses/interfaces. + *
      • Allowlisted method named {@code isField()} from receiver's class/superclasses/interfaces. *
      • The {@code length} field of an array. *
      • The value corresponding to a map key named {@code field} when the receiver is a Map. *
      • The value in a list at element {@code field} (integer) when the receiver is a List. *
      *

      * This method traverses {@code recieverClass}'s class hierarchy (including interfaces) - * until it finds a matching whitelisted getter. If one is not found, it throws an exception. + * until it finds a matching allowlisted getter. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching getter. *

      - * @param painlessLookup the whitelist + * @param painlessLookup the allowlist * @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. * @return pointer to matching field. never returns null. - * @throws IllegalArgumentException if no matching whitelisted field was found. + * @throws IllegalArgumentException if no matching allowlisted field was found. */ static MethodHandle lookupGetter(PainlessLookup painlessLookup, Class receiverClass, String name) { - // first try whitelist + // first try allowlist MethodHandle getter = painlessLookup.lookupRuntimeGetterMethodHandle(receiverClass, name); if (getter != null) { @@ -530,24 +530,24 @@ static MethodHandle lookupGetter(PainlessLookup painlessLookup, Class receive *

      * The following field stores are allowed: *

        - *
      • Whitelisted {@code field} from receiver's class or any superclasses. - *
      • Whitelisted method named {@code setField()} from receiver's class/superclasses/interfaces. + *
      • Allowlisted {@code field} from receiver's class or any superclasses. + *
      • Allowlisted method named {@code setField()} from receiver's class/superclasses/interfaces. *
      • The value corresponding to a map key named {@code field} when the receiver is a Map. *
      • The value in a list at element {@code field} (integer) when the receiver is a List. *
      *

      * This method traverses {@code recieverClass}'s class hierarchy (including interfaces) - * until it finds a matching whitelisted setter. If one is not found, it throws an exception. + * until it finds a matching allowlisted setter. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching setter. *

      - * @param painlessLookup the whitelist + * @param painlessLookup the allowlist * @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. * @return pointer to matching field. never returns null. - * @throws IllegalArgumentException if no matching whitelisted field was found. + * @throws IllegalArgumentException if no matching allowlisted field was found. */ static MethodHandle lookupSetter(PainlessLookup painlessLookup, Class receiverClass, String name) { - // first try whitelist + // first try allowlist MethodHandle setter = painlessLookup.lookupRuntimeSetterMethodHandle(receiverClass, name); if (setter != null) { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java b/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java index 97e2b6f24666e..0726881b1297f 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java @@ -53,7 +53,7 @@ * shift operator, and dynamic array index normalize. *

      * When a new type is encountered at the call site, we lookup from the appropriate - * whitelist, and cache with a guard. If we encounter too many types, we stop caching. + * allowlist, and cache with a guard. If we encounter too many types, we stop caching. *

      * Based on the cascaded inlining cache from the JSR 292 cookbook * (https://code.google.com/archive/p/jsr292-cookbook/, BSD license) @@ -166,7 +166,7 @@ static boolean checkClass(Class clazz, Object receiver) { } /** - * Does a slow lookup against the whitelist. + * Does a slow lookup against the allowlist. */ private MethodHandle lookup(int flavor, String name, Class receiver) throws Throwable { switch (flavor) { @@ -470,10 +470,10 @@ static boolean checkBoth(Class left, Class right, Object leftObject, Objec * In addition to ordinary parameters, we also take some parameters defined at the call site: *

        *
      • {@code initialDepth}: initial call site depth. this is used to exercise megamorphic fallback. - *
      • {@code flavor}: type of dynamic call it is (and which part of whitelist to look at). + *
      • {@code flavor}: type of dynamic call it is (and which part of allowlist to look at). *
      • {@code args}: flavor-specific args. *
      - * And we take the {@link PainlessLookup} used to compile the script for whitelist checking. + * And we take the {@link PainlessLookup} used to compile the script for allowlist checking. *

      * see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.invokedynamic */ diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java index 097960dfbe620..c6aa266148791 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java @@ -59,8 +59,8 @@ */ public class FunctionRef { /** - * Creates a new FunctionRef which will resolve {@code type::call} from the whitelist. - * @param painlessLookup the whitelist against which this script is being compiled + * Creates a new FunctionRef which will resolve {@code type::call} from the allowlist. + * @param painlessLookup the allowlist against which this script is being compiled * @param functionTable user-defined and synthetic methods generated directly on the script class * @param location the character number within the script at compile-time * @param targetClass functional interface type to implement. diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java index 4c693243d2a22..09a23c15f346d 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java @@ -83,11 +83,11 @@ */ public final class PainlessPlugin extends Plugin implements ScriptPlugin, ExtensiblePlugin, ActionPlugin { - private static final Map, List> whitelists; + private static final Map, List> allowlists; /* - * Contexts from Core that need custom whitelists can add them to the map below. - * Whitelist resources should be added as appropriately named, separate files + * Contexts from Core that need custom allowlists can add them to the map below. 
+ * Allowlist resources should be added as appropriately named, separate files * under Painless' resources */ static { @@ -108,23 +108,23 @@ public final class PainlessPlugin extends Plugin implements ScriptPlugin, Extens ingest.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.ingest.txt")); map.put(IngestScript.CONTEXT, ingest); - whitelists = map; + allowlists = map; } private final SetOnce painlessScriptEngine = new SetOnce<>(); @Override public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - Map, List> contextsWithWhitelists = new HashMap<>(); + Map, List> contextsWithAllowlists = new HashMap<>(); for (ScriptContext context : contexts) { - // we might have a context that only uses the base whitelists, so would not have been filled in by reloadSPI - List contextWhitelists = whitelists.get(context); - if (contextWhitelists == null) { - contextWhitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS); + // we might have a context that only uses the base allowlists, so would not have been filled in by reloadSPI + List contextAllowlists = allowlists.get(context); + if (contextAllowlists == null) { + contextAllowlists = new ArrayList<>(Whitelist.BASE_WHITELISTS); } - contextsWithWhitelists.put(context, contextWhitelists); + contextsWithAllowlists.put(context, contextAllowlists); } - painlessScriptEngine.set(new PainlessScriptEngine(settings, contextsWithWhitelists)); + painlessScriptEngine.set(new PainlessScriptEngine(settings, contextsWithAllowlists)); return painlessScriptEngine.get(); } @@ -158,7 +158,7 @@ public void loadExtensions(ExtensionLoader loader) { .stream() .flatMap(extension -> extension.getContextWhitelists().entrySet().stream()) .forEach(entry -> { - List existing = whitelists.computeIfAbsent(entry.getKey(), c -> new ArrayList<>(Whitelist.BASE_WHITELISTS)); + List existing = allowlists.computeIfAbsent(entry.getKey(), c -> new ArrayList<>(Whitelist.BASE_WHITELISTS)); existing.addAll(entry.getValue()); }); } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java index 4b2125aac244d..a9333fde6b443 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java @@ -68,7 +68,7 @@ import static org.opensearch.rest.RestRequest.Method.GET; /** - * Internal REST API for querying context information about Painless whitelists. + * Internal REST API for querying context information about Painless allowlists. * Commands include the following: *

 * <ul>
 *     <li> GET /_scripts/painless/_context -- retrieves a list of contexts </li>
      • diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java index dd8e253db4728..ff3fbc640e990 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java @@ -126,107 +126,107 @@ Class defineBridge(String name, byte[] bytes) { } } - public static PainlessLookup buildFromWhitelists(List whitelists) { + public static PainlessLookup buildFromWhitelists(List allowlists) { PainlessLookupBuilder painlessLookupBuilder = new PainlessLookupBuilder(); String origin = "internal error"; try { - for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistClass : whitelist.whitelistClasses) { - origin = whitelistClass.origin; + for (Whitelist allowlist : allowlists) { + for (WhitelistClass allowlistClass : allowlist.whitelistClasses) { + origin = allowlistClass.origin; painlessLookupBuilder.addPainlessClass( - whitelist.classLoader, - whitelistClass.javaClassName, - whitelistClass.painlessAnnotations.containsKey(NoImportAnnotation.class) == false + allowlist.classLoader, + allowlistClass.javaClassName, + allowlistClass.painlessAnnotations.containsKey(NoImportAnnotation.class) == false ); } } - for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistClass : whitelist.whitelistClasses) { - String targetCanonicalClassName = whitelistClass.javaClassName.replace('$', '.'); + for (Whitelist allowlist : allowlists) { + for (WhitelistClass allowlistClass : allowlist.whitelistClasses) { + String targetCanonicalClassName = allowlistClass.javaClassName.replace('$', '.'); - for (WhitelistConstructor whitelistConstructor : whitelistClass.whitelistConstructors) { - origin = whitelistConstructor.origin; + for (WhitelistConstructor allowlistConstructor : allowlistClass.whitelistConstructors) { + origin = allowlistConstructor.origin; painlessLookupBuilder.addPainlessConstructor( targetCanonicalClassName, - whitelistConstructor.canonicalTypeNameParameters, - whitelistConstructor.painlessAnnotations + allowlistConstructor.canonicalTypeNameParameters, + allowlistConstructor.painlessAnnotations ); } - for (WhitelistMethod whitelistMethod : whitelistClass.whitelistMethods) { - origin = whitelistMethod.origin; + for (WhitelistMethod allowlistMethod : allowlistClass.whitelistMethods) { + origin = allowlistMethod.origin; painlessLookupBuilder.addPainlessMethod( - whitelist.classLoader, + allowlist.classLoader, targetCanonicalClassName, - whitelistMethod.augmentedCanonicalClassName, - whitelistMethod.methodName, - whitelistMethod.returnCanonicalTypeName, - whitelistMethod.canonicalTypeNameParameters, - whitelistMethod.painlessAnnotations + allowlistMethod.augmentedCanonicalClassName, + allowlistMethod.methodName, + allowlistMethod.returnCanonicalTypeName, + allowlistMethod.canonicalTypeNameParameters, + allowlistMethod.painlessAnnotations ); } - for (WhitelistField whitelistField : whitelistClass.whitelistFields) { - origin = whitelistField.origin; + for (WhitelistField allowlistField : allowlistClass.whitelistFields) { + origin = allowlistField.origin; painlessLookupBuilder.addPainlessField( targetCanonicalClassName, - whitelistField.fieldName, - whitelistField.canonicalTypeNameParameter + allowlistField.fieldName, + allowlistField.canonicalTypeNameParameter ); } } - for (WhitelistMethod whitelistStatic : 
whitelist.whitelistImportedMethods) { - origin = whitelistStatic.origin; + for (WhitelistMethod allowlistStatic : allowlist.whitelistImportedMethods) { + origin = allowlistStatic.origin; painlessLookupBuilder.addImportedPainlessMethod( - whitelist.classLoader, - whitelistStatic.augmentedCanonicalClassName, - whitelistStatic.methodName, - whitelistStatic.returnCanonicalTypeName, - whitelistStatic.canonicalTypeNameParameters, - whitelistStatic.painlessAnnotations + allowlist.classLoader, + allowlistStatic.augmentedCanonicalClassName, + allowlistStatic.methodName, + allowlistStatic.returnCanonicalTypeName, + allowlistStatic.canonicalTypeNameParameters, + allowlistStatic.painlessAnnotations ); } - for (WhitelistClassBinding whitelistClassBinding : whitelist.whitelistClassBindings) { - origin = whitelistClassBinding.origin; + for (WhitelistClassBinding allowlistClassBinding : allowlist.whitelistClassBindings) { + origin = allowlistClassBinding.origin; painlessLookupBuilder.addPainlessClassBinding( - whitelist.classLoader, - whitelistClassBinding.targetJavaClassName, - whitelistClassBinding.methodName, - whitelistClassBinding.returnCanonicalTypeName, - whitelistClassBinding.canonicalTypeNameParameters, - whitelistClassBinding.painlessAnnotations + allowlist.classLoader, + allowlistClassBinding.targetJavaClassName, + allowlistClassBinding.methodName, + allowlistClassBinding.returnCanonicalTypeName, + allowlistClassBinding.canonicalTypeNameParameters, + allowlistClassBinding.painlessAnnotations ); } - for (WhitelistInstanceBinding whitelistInstanceBinding : whitelist.whitelistInstanceBindings) { - origin = whitelistInstanceBinding.origin; + for (WhitelistInstanceBinding allowlistInstanceBinding : allowlist.whitelistInstanceBindings) { + origin = allowlistInstanceBinding.origin; painlessLookupBuilder.addPainlessInstanceBinding( - whitelistInstanceBinding.targetInstance, - whitelistInstanceBinding.methodName, - whitelistInstanceBinding.returnCanonicalTypeName, - whitelistInstanceBinding.canonicalTypeNameParameters + allowlistInstanceBinding.targetInstance, + allowlistInstanceBinding.methodName, + allowlistInstanceBinding.returnCanonicalTypeName, + allowlistInstanceBinding.canonicalTypeNameParameters ); } } } catch (Exception exception) { - throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception); + throw new IllegalArgumentException("error loading allowlist(s) " + origin, exception); } return painlessLookupBuilder.build(); } // javaClassNamesToClasses is all the classes that need to be available to the custom classloader - // including classes used as part of imported methods and class bindings but not necessarily whitelisted + // including classes used as part of imported methods and class bindings but not necessarily allowlisted // individually. The values of javaClassNamesToClasses are a superset of the values of // canonicalClassNamesToClasses. private final Map> javaClassNamesToClasses; - // canonicalClassNamesToClasses is all the whitelisted classes available in a Painless script including + // canonicalClassNamesToClasses is all the allowlisted classes available in a Painless script including // classes with imported canonical names but does not include classes from imported methods or class - // bindings unless also whitelisted separately. The values of canonicalClassNamesToClasses are a subset + // bindings unless also allowlisted separately. The values of canonicalClassNamesToClasses are a subset // of the values of javaClassNamesToClasses. 
private final Map> canonicalClassNamesToClasses; private final Map, PainlessClassBuilder> classesToPainlessClassBuilders; @@ -2060,7 +2060,7 @@ private void setFunctionalInterfaceMethod(Class targetClass, PainlessClassBui /** * Creates a {@link Map} of PainlessMethodKeys to {@link PainlessMethod}s per {@link PainlessClass} stored as * {@link PainlessClass#runtimeMethods} identical to {@link PainlessClass#methods} with the exception of generated - * bridge methods. A generated bridge method is created for each whitelisted method that has at least one parameter + * bridge methods. A generated bridge method is created for each allowlisted method that has at least one parameter * with a boxed type to cast from other numeric primitive/boxed types in a symmetric was not handled by * {@link MethodHandle#asType(MethodType)}. As an example {@link MethodHandle#asType(MethodType)} legally casts * from {@link Integer} to long but not from int to {@link Long}. Generated bridge methods cover the latter case. diff --git a/modules/lang-painless/src/main/plugin-metadata/plugin-security.policy b/modules/lang-painless/src/main/plugin-metadata/plugin-security.policy index d1e2f88bb166a..ccfd6ba70dd16 100644 --- a/modules/lang-painless/src/main/plugin-metadata/plugin-security.policy +++ b/modules/lang-painless/src/main/plugin-metadata/plugin-security.policy @@ -34,6 +34,6 @@ grant { // needed to generate runtime classes permission java.lang.RuntimePermission "createClassLoader"; - // needed to find the classloader to load whitelisted classes from + // needed to find the classloader to load allowlisted classes from permission java.lang.RuntimePermission "getClassLoader"; }; diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/java.util.regex.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/java.util.regex.txt index ab12664824be8..05b3e4fa83cc1 100644 --- a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/java.util.regex.txt +++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/java.util.regex.txt @@ -58,7 +58,7 @@ class java.util.regex.Matcher { String replaceFirst(String) boolean requireEnd() Matcher reset() - # Note: Do not whitelist Matcher.reset(String), it subverts regex limiting + # Note: Do not allowlist Matcher.reset(String), it subverts regex limiting int start() int start(int) Matcher useAnchoringBounds(boolean) diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.aggs.movfn.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.aggs.movfn.txt index ed75c44ce9ffc..e314934ed4e56 100644 --- a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.aggs.movfn.txt +++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.aggs.movfn.txt @@ -17,7 +17,7 @@ # under the License. 
# -# This file contains a whitelist for the Moving Function pipeline aggregator in core +# This file contains an allowlist for the Moving Function pipeline aggregator in core class org.opensearch.search.aggregations.pipeline.MovingFunctions { double max(double[]) diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.ingest.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.ingest.txt index 36f3c8e418dd6..cddb8e5f0aa7e 100644 --- a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.ingest.txt +++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.ingest.txt @@ -17,7 +17,7 @@ # under the License. # -# This file contains a whitelist for the ingest scripts +# This file contains an allowlist for the ingest scripts class java.lang.String { String org.opensearch.painless.api.Augmentation sha1() diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt index 9c8b8fd0d2cb5..cca7e07a95388 100644 --- a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt +++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt @@ -17,7 +17,7 @@ # under the License. # -# This file contains a whitelist for functions to be used in Score context +# This file contains an allowlist for functions to be used in Score context class org.opensearch.script.ScoreScript @no_import { } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java index d5cd3205b315c..98b0cad9960f8 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java @@ -53,9 +53,9 @@ public class AugmentationTests extends ScriptTestCase { @BeforeClass public static void beforeClass() { Map, List> contexts = newDefaultContexts(); - List digestWhitelist = new ArrayList<>(Whitelist.BASE_WHITELISTS); - digestWhitelist.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.ingest.txt")); - contexts.put(DigestTestScript.CONTEXT, digestWhitelist); + List digestAllowlist = new ArrayList<>(Whitelist.BASE_WHITELISTS); + digestAllowlist.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.ingest.txt")); + contexts.put(DigestTestScript.CONTEXT, digestAllowlist); SCRIPT_ENGINE = new PainlessScriptEngine(Settings.EMPTY, contexts); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java index 65f277741cc81..e5113d93677ab 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java @@ -51,8 +51,8 @@ public class BindingsTests extends ScriptTestCase { @BeforeClass public static void beforeClass() { Map, List> contexts = newDefaultContexts(); - List whitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS); - whitelists.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.painless.test")); + List allowlists = new ArrayList<>(Whitelist.BASE_WHITELISTS); + 
allowlists.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.painless.test")); InstanceBindingTestClass instanceBindingTestClass = new InstanceBindingTestClass(1); WhitelistInstanceBinding getter = new WhitelistInstanceBinding( @@ -74,16 +74,16 @@ public static void beforeClass() { List instanceBindingsList = new ArrayList<>(); instanceBindingsList.add(getter); instanceBindingsList.add(setter); - Whitelist instanceBindingsWhitelist = new Whitelist( + Whitelist instanceBindingsAllowlist = new Whitelist( instanceBindingTestClass.getClass().getClassLoader(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), instanceBindingsList ); - whitelists.add(instanceBindingsWhitelist); + allowlists.add(instanceBindingsAllowlist); - contexts.put(BindingsTestScript.CONTEXT, whitelists); + contexts.put(BindingsTestScript.CONTEXT, allowlists); SCRIPT_ENGINE = new PainlessScriptEngine(Settings.EMPTY, contexts); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java index 1bc6597b584e4..bf9ad76fc6ceb 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java @@ -35,7 +35,7 @@ import java.util.List; import java.util.function.Function; -/** Currently just a dummy class for testing a few features not yet exposed by whitelist! */ +/** Currently just a dummy class for testing a few features not yet exposed by allowlist! */ public class FeatureTestObject { /** static method that returns true */ public static boolean overloadedStatic() { @@ -47,7 +47,7 @@ public static boolean overloadedStatic(boolean whatToReturn) { return whatToReturn; } - /** static method only whitelisted as a static */ + /** static method only allowlisted as a static */ public static float staticAddFloatsTest(float x, float y) { return x + y; } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java index 9fb0610bc94af..22216e5fb4188 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java @@ -32,7 +32,7 @@ package org.opensearch.painless; -/** Currently just a dummy class for testing a few features not yet exposed by whitelist! */ +/** Currently just a dummy class for testing a few features not yet exposed by allowlist! 
*/ public class FeatureTestObject2 { public FeatureTestObject2() { super(); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java index cb8296a3f233d..8c1f545efcf7a 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java @@ -172,7 +172,7 @@ public void testNamedGroup() { assertEquals("o", exec("Matcher m = /(?f)(?o)o/.matcher('foo'); m.find(); return m.namedGroup('second')")); } - // Make sure some methods on Pattern are whitelisted + // Make sure some methods on Pattern are allowlisted public void testSplit() { assertArrayEquals(new String[] { "cat", "dog" }, (String[]) exec("/,/.split('cat,dog')")); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java index 488c01c6d1a59..a30aa97d33461 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java @@ -62,9 +62,9 @@ public abstract class ScriptTestCase extends OpenSearchTestCase { /** Creates a new contexts map with PainlessTextScript = org.opensearch.painless.test */ protected static Map, List> newDefaultContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS); - whitelists.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.painless.test")); - contexts.put(PainlessTestScript.CONTEXT, whitelists); + List allowlists = new ArrayList<>(Whitelist.BASE_WHITELISTS); + allowlists.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.painless.test")); + contexts.put(PainlessTestScript.CONTEXT, allowlists); return contexts; } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java index 1f5b252cb74a4..e4e754a541414 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java @@ -70,51 +70,51 @@ public void testUnknownAnnotations() { public void testAnnotations() { Map parsers = new HashMap<>(WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS); parsers.put(AnnotationTestObject.TestAnnotation.NAME, AnnotationTestObject.TestAnnotationParser.INSTANCE); - Whitelist whitelist = WhitelistLoader.loadFromResourceFiles(Whitelist.class, parsers, "org.opensearch.painless.annotation"); + Whitelist allowlist = WhitelistLoader.loadFromResourceFiles(Whitelist.class, parsers, "org.opensearch.painless.annotation"); - assertEquals(1, whitelist.whitelistClasses.size()); + assertEquals(1, allowlist.whitelistClasses.size()); - WhitelistClass whitelistClass = whitelist.whitelistClasses.get(0); + WhitelistClass allowlistClass = allowlist.whitelistClasses.get(0); - assertNotNull(whitelistClass.painlessAnnotations.get(NoImportAnnotation.class)); - assertEquals(1, whitelistClass.painlessAnnotations.size()); - assertEquals(3, whitelistClass.whitelistMethods.size()); + assertNotNull(allowlistClass.painlessAnnotations.get(NoImportAnnotation.class)); + assertEquals(1, allowlistClass.painlessAnnotations.size()); + assertEquals(3, 
allowlistClass.whitelistMethods.size()); int count = 0; - for (WhitelistMethod whitelistMethod : whitelistClass.whitelistMethods) { - if ("deprecatedMethod".equals(whitelistMethod.methodName)) { + for (WhitelistMethod allowlistMethod : allowlistClass.whitelistMethods) { + if ("deprecatedMethod".equals(allowlistMethod.methodName)) { assertEquals( "use another method", - ((DeprecatedAnnotation) whitelistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage() + ((DeprecatedAnnotation) allowlistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage() ); - assertEquals(1, whitelistMethod.painlessAnnotations.size()); + assertEquals(1, allowlistMethod.painlessAnnotations.size()); ++count; } - if ("annotatedTestMethod".equals(whitelistMethod.methodName)) { - AnnotationTestObject.TestAnnotation ta = ((AnnotationTestObject.TestAnnotation) whitelistMethod.painlessAnnotations.get( + if ("annotatedTestMethod".equals(allowlistMethod.methodName)) { + AnnotationTestObject.TestAnnotation ta = ((AnnotationTestObject.TestAnnotation) allowlistMethod.painlessAnnotations.get( AnnotationTestObject.TestAnnotation.class )); assertEquals("one", ta.getOne()); assertEquals("two", ta.getTwo()); assertEquals("three", ta.getThree()); - assertEquals(1, whitelistMethod.painlessAnnotations.size()); + assertEquals(1, allowlistMethod.painlessAnnotations.size()); ++count; } - if ("annotatedMultipleMethod".equals(whitelistMethod.methodName)) { + if ("annotatedMultipleMethod".equals(allowlistMethod.methodName)) { assertEquals( "test", - ((DeprecatedAnnotation) whitelistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage() + ((DeprecatedAnnotation) allowlistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage() ); - AnnotationTestObject.TestAnnotation ta = ((AnnotationTestObject.TestAnnotation) whitelistMethod.painlessAnnotations.get( + AnnotationTestObject.TestAnnotation ta = ((AnnotationTestObject.TestAnnotation) allowlistMethod.painlessAnnotations.get( AnnotationTestObject.TestAnnotation.class )); assertEquals("one", ta.getOne()); assertEquals("two", ta.getTwo()); assertEquals("three", ta.getThree()); - assertEquals(2, whitelistMethod.painlessAnnotations.size()); + assertEquals(2, allowlistMethod.painlessAnnotations.size()); ++count; } } diff --git a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation index 35808a46a90a2..897fb9c7aff9c 100644 --- a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation +++ b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation @@ -1,4 +1,4 @@ -# whitelist for annotation tests +# allowlist for annotation tests class org.opensearch.painless.AnnotationTestObject @no_import { void deprecatedMethod() @deprecated[message="use another method"] diff --git a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown index 386a0bfd7acf7..c5bb17cc42cb2 100644 --- a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown +++ b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown @@ -1,4 +1,4 @@ -# 
whitelist for annotation tests with unknown annotation +# allowlist for annotation tests with unknown annotation class org.opensearch.painless.AnnotationTestObject @no_import { void unknownAnnotationMethod() @unknownAnnotation diff --git a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown_with_options b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown_with_options index 616776d5ed3e0..d6d96cc6cfc16 100644 --- a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown_with_options +++ b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown_with_options @@ -1,4 +1,4 @@ -# whitelist for annotation tests with unknown annotation containing options +# allowlist for annotation tests with unknown annotation containing options class org.opensearch.painless.AnnotationTestObject @no_import { void unknownAnnotationMethod() @unknownAnootationWithMessage[arg="arg value"] diff --git a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.test b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.test index d232904057000..5345f7fab8794 100644 --- a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.test +++ b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.test @@ -1,4 +1,4 @@ -# whitelist for tests +# allowlist for tests # TODO: remove this when the transition from Joda to Java datetimes is completed class org.opensearch.script.JodaCompatibleZonedDateTime { diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml index 57e7b4e31e057..fee31fca55dd5 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml @@ -1,4 +1,4 @@ -# Sanity integration test to make sure the custom context and whitelist work for moving_fn pipeline agg +# Sanity integration test to make sure the custom context and allowlist work for moving_fn pipeline agg # setup: - do: diff --git a/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java b/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java index 7ddb27e28d94c..cbba425a04889 100644 --- a/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java +++ b/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java @@ -187,7 +187,7 @@ private void init() throws IOException { } private IndexRequestBuilder prepareIndex(String id, String... 
texts) throws IOException { - return client().prepareIndex("test", "test", id).setSource("foo", texts); + return client().prepareIndex("test").setId(id).setSource("foo", texts); } private SearchResponse searchById(String id) { diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/BWCTemplateTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/BWCTemplateTests.java index ea9cc46c7707b..d9e40fac1ad0f 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/BWCTemplateTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/BWCTemplateTests.java @@ -58,8 +58,8 @@ public void testBeatsTemplatesBWC() throws Exception { client().admin().indices().preparePutTemplate("packetbeat").setSource(packetBeat, XContentType.JSON).get(); client().admin().indices().preparePutTemplate("filebeat").setSource(fileBeat, XContentType.JSON).get(); - client().prepareIndex("metricbeat-foo", "doc", "1").setSource("message", "foo").get(); - client().prepareIndex("packetbeat-foo", "doc", "1").setSource("message", "foo").get(); - client().prepareIndex("filebeat-foo", "doc", "1").setSource("message", "foo").get(); + client().prepareIndex("metricbeat-foo").setId("1").setSource("message", "foo").get(); + client().prepareIndex("packetbeat-foo").setId("1").setSource("message", "foo").get(); + client().prepareIndex("filebeat-foo").setId("1").setSource("message", "foo").get(); } } diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java index 40c4fd24b0b48..b0d7bb9d2e14e 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java @@ -63,7 +63,6 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws new CompressedXContent( Strings.toString( PutMappingRequest.buildFromSimplifiedDef( - "_doc", "my_feature_field", "type=rank_feature", "my_negative_feature_field", diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java index b831e76c4ad71..4e98d2aa1af08 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java @@ -207,7 +207,7 @@ public void testWithDeletes() throws Exception { * the updates cause that. 
*/ UpdateResponse updateResponse; - updateResponse = client().prepareUpdate(indexName, "doc", idToUpdate) + updateResponse = client().prepareUpdate(indexName, idToUpdate) .setRouting("1") .setDoc(Requests.INDEX_CONTENT_TYPE, "count", 1) .setDetectNoop(false) diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java index 6910964ac8c58..ce83790c0d302 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java @@ -534,7 +534,7 @@ public void testHasChildAndHasParentFailWhenSomeSegmentsDontContainAnyParentOrCh createIndexRequest("test", "parent", "1", null, "p_field", 1).get(); createIndexRequest("test", "child", "2", "1", "c_field", 1).get(); - client().prepareIndex("test", "doc", "3").setSource("p_field", 1).get(); + client().prepareIndex("test").setId("3").setSource("p_field", 1).get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -769,7 +769,7 @@ public void testParentChildQueriesCanHandleNoRelevantTypesInIndex() throws Excep assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(0L)); - client().prepareIndex("test", "doc") + client().prepareIndex("test") .setSource(jsonBuilder().startObject().field("text", "value").endObject()) .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); @@ -801,7 +801,7 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception { createIndexRequest("test", "child", "2", "1", "c_field", 1).get(); client().admin().indices().prepareFlush("test").get(); - client().prepareIndex("test", "doc", "3").setSource("p_field", 2).get(); + client().prepareIndex("test").setId("3").setSource("p_field", 2).get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -1326,7 +1326,7 @@ public void testParentChildQueriesNoParentType() throws Exception { ensureGreen(); String parentId = "p1"; - client().prepareIndex("test", "doc", parentId).setSource("p_field", "1").get(); + client().prepareIndex("test").setId(parentId).setSource("p_field", "1").get(); refresh(); try { diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java index 03cff9c19a703..ed53b1643cc75 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java @@ -644,7 +644,7 @@ public void testInnerHitsWithIgnoreUnmapped() { assertAcked(prepareCreate("index2")); createIndexRequest("index1", "parent_type", "1", null, "nested_type", Collections.singletonMap("key", "value")).get(); createIndexRequest("index1", "child_type", "2", "1").get(); - client().prepareIndex("index2", "type", "3").setSource("key", "value").get(); + client().prepareIndex("index2").setId("3").setSource("key", "value").get(); refresh(); SearchResponse response = client().prepareSearch("index1", "index2") diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java index 5c5c6b47fb806..5d6d4fb333d49 100644 --- 
a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java @@ -129,7 +129,7 @@ private IndexRequestBuilder createIndexRequest(String index, String type, String String name = type; type = "doc"; - IndexRequestBuilder indexRequestBuilder = client().prepareIndex(index, type, id); + IndexRequestBuilder indexRequestBuilder = client().prepareIndex(index).setId(id); Map joinField = new HashMap<>(); if (parentId != null) { joinField.put("name", name); diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java index f76f14a6d9575..37c0eb051cd55 100644 --- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java @@ -104,13 +104,16 @@ public void testPercolatorQuery() throws Exception { .addMapping("type", "id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("query", matchAllQuery()).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("id", "2").field("query", matchQuery("field1", "value")).endObject()) .get(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("id", "3") @@ -195,13 +198,16 @@ public void testPercolatorRangeQueries() throws Exception { ) ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(10).to(12)).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(20).to(22)).endObject()) .get(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("query", boolQuery().must(rangeQuery("field1").from(10).to(12)).must(rangeQuery("field1").from(12).to(14))) @@ -209,13 +215,16 @@ public void testPercolatorRangeQueries() throws Exception { ) .get(); client().admin().indices().prepareRefresh().get(); - client().prepareIndex("test", "type", "4") + client().prepareIndex("test") + .setId("4") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(10).to(12)).endObject()) .get(); - client().prepareIndex("test", "type", "5") + client().prepareIndex("test") + .setId("5") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(20).to(22)).endObject()) .get(); - client().prepareIndex("test", "type", "6") + client().prepareIndex("test") + .setId("6") .setSource( jsonBuilder().startObject() .field("query", boolQuery().must(rangeQuery("field2").from(10).to(12)).must(rangeQuery("field2").from(12).to(14))) @@ -223,13 +232,16 @@ public void testPercolatorRangeQueries() throws Exception { ) .get(); client().admin().indices().prepareRefresh().get(); - client().prepareIndex("test", 
"type", "7") + client().prepareIndex("test") + .setId("7") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field3").from("192.168.1.0").to("192.168.1.5")).endObject()) .get(); - client().prepareIndex("test", "type", "8") + client().prepareIndex("test") + .setId("8") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field3").from("192.168.1.20").to("192.168.1.30")).endObject()) .get(); - client().prepareIndex("test", "type", "9") + client().prepareIndex("test") + .setId("9") .setSource( jsonBuilder().startObject() .field( @@ -240,7 +252,8 @@ public void testPercolatorRangeQueries() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type", "10") + client().prepareIndex("test") + .setId("10") .setSource( jsonBuilder().startObject() .field( @@ -315,7 +328,8 @@ public void testPercolatorGeoQueries() throws Exception { ) ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("query", geoDistanceQuery("field1").point(52.18, 4.38).distance(50, DistanceUnit.KILOMETERS)) @@ -324,7 +338,8 @@ public void testPercolatorGeoQueries() throws Exception { ) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("query", geoBoundingBoxQuery("field1").setCorners(52.3, 4.4, 52.1, 4.6)) @@ -333,7 +348,8 @@ public void testPercolatorGeoQueries() throws Exception { ) .get(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field( @@ -367,13 +383,16 @@ public void testPercolatorQueryExistingDocument() throws Exception { .addMapping("type", "id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("query", matchAllQuery()).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("id", "2").field("query", matchQuery("field1", "value")).endObject()) .get(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("id", "3") @@ -382,9 +401,9 @@ public void testPercolatorQueryExistingDocument() throws Exception { ) .get(); - client().prepareIndex("test", "type", "4").setSource("{\"id\": \"4\"}", XContentType.JSON).get(); - client().prepareIndex("test", "type", "5").setSource(XContentType.JSON, "id", "5", "field1", "value").get(); - client().prepareIndex("test", "type", "6").setSource(XContentType.JSON, "id", "6", "field1", "value", "field2", "value").get(); + client().prepareIndex("test").setId("4").setSource("{\"id\": \"4\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("5").setSource(XContentType.JSON, "id", "5", "field1", "value").get(); + client().prepareIndex("test").setId("6").setSource(XContentType.JSON, "id", "6", "field1", "value", "field2", "value").get(); client().admin().indices().prepareRefresh().get(); logger.info("percolating empty doc"); @@ -422,9 +441,9 @@ public void testPercolatorQueryExistingDocumentSourceDisabled() throws Exception .addMapping("type", "_source", "enabled=false", "field1", "type=keyword", "query", "type=percolator") ); - client().prepareIndex("test", "type", 
"1").setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()).get(); - client().prepareIndex("test", "type", "2").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("2").setSource("{}", XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); logger.info("percolating empty doc with source disabled"); @@ -443,10 +462,12 @@ public void testPercolatorSpecificQueries() throws Exception { .addMapping("type", "id", "type=keyword", "field1", "type=text", "field2", "type=text", "query", "type=percolator") ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", 1).field("query", commonTermsQuery("field1", "quick brown fox")).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("id", 2) @@ -454,7 +475,8 @@ public void testPercolatorSpecificQueries() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("id", 3) @@ -469,7 +491,8 @@ public void testPercolatorSpecificQueries() throws Exception { .get(); client().admin().indices().prepareRefresh().get(); - client().prepareIndex("test", "type", "4") + client().prepareIndex("test") + .setId("4") .setSource( jsonBuilder().startObject() .field("id", 4) @@ -489,7 +512,8 @@ public void testPercolatorSpecificQueries() throws Exception { .get(); // doesn't match - client().prepareIndex("test", "type", "5") + client().prepareIndex("test") + .setId("5") .setSource( jsonBuilder().startObject() .field("id", 5) @@ -543,23 +567,28 @@ public void testPercolatorQueryWithHighlighting() throws Exception { .prepareCreate("test") .addMapping("type", "id", "type=keyword", "field1", fieldMapping, "query", "type=percolator") ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("query", matchQuery("field1", "brown fox")).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("id", "2").field("query", matchQuery("field1", "lazy dog")).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("id", "3").field("query", termQuery("field1", "jumps")).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "4") + client().prepareIndex("test") + .setId("4") .setSource(jsonBuilder().startObject().field("id", "4").field("query", termQuery("field1", "dog")).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "5") + client().prepareIndex("test") + .setId("5") .setSource(jsonBuilder().startObject().field("id", "5").field("query", termQuery("field1", "fox")).endObject()) .execute() .actionGet(); @@ -783,10 +812,12 @@ public void testTakePositionOffsetGapIntoAccount() throws Exception { .prepareCreate("test") .addMapping("type", "field", "type=text,position_increment_gap=5", "query", "type=percolator") ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + 
.setId("1") .setSource(jsonBuilder().startObject().field("query", new MatchPhraseQueryBuilder("field", "brown fox").slop(4)).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("query", new MatchPhraseQueryBuilder("field", "brown fox").slop(5)).endObject()) .get(); client().admin().indices().prepareRefresh().get(); @@ -868,10 +899,12 @@ public void testWithMultiplePercolatorFields() throws Exception { ); // Acceptable: - client().prepareIndex("test1", "type", "1") + client().prepareIndex("test1") + .setId("1") .setSource(jsonBuilder().startObject().field(queryFieldName, matchQuery("field", "value")).endObject()) .get(); - client().prepareIndex("test2", "type", "1") + client().prepareIndex("test2") + .setId("1") .setSource( jsonBuilder().startObject() .startObject("object_field") @@ -901,7 +934,8 @@ public void testWithMultiplePercolatorFields() throws Exception { // Unacceptable: MapperParsingException e = expectThrows(MapperParsingException.class, () -> { - client().prepareIndex("test2", "type", "1") + client().prepareIndex("test2") + .setId("1") .setSource( jsonBuilder().startObject() .startArray("object_field") @@ -944,7 +978,8 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { .endObject() .endObject(); assertAcked(client().admin().indices().prepareCreate("test").addMapping("employee", mapping)); - client().prepareIndex("test", "employee", "q1") + client().prepareIndex("test") + .setId("q1") .setSource( jsonBuilder().startObject() .field("id", "q1") @@ -960,7 +995,8 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { ) .get(); // this query should never match as it doesn't use nested query: - client().prepareIndex("test", "employee", "q2") + client().prepareIndex("test") + .setId("q2") .setSource( jsonBuilder().startObject() .field("id", "q2") @@ -970,7 +1006,8 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { .get(); client().admin().indices().prepareRefresh().get(); - client().prepareIndex("test", "employee", "q3") + client().prepareIndex("test") + .setId("q3") .setSource(jsonBuilder().startObject().field("id", "q3").field("query", QueryBuilders.matchAllQuery()).endObject()) .get(); client().admin().indices().prepareRefresh().get(); @@ -1101,15 +1138,18 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { public void testPercolatorQueryViaMultiSearch() throws Exception { assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", "field1", "type=text", "query", "type=percolator")); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("query", boolQuery().must(matchQuery("field1", "b")).must(matchQuery("field1", "c"))) @@ -1117,11 +1157,13 @@ public void testPercolatorQueryViaMultiSearch() throws Exception { ) .execute() .actionGet(); - client().prepareIndex("test", "type", "4") + client().prepareIndex("test") + .setId("4") 
.setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "5") + client().prepareIndex("test") + .setId("5") .setSource(jsonBuilder().startObject().field("field1", "c").endObject()) .execute() .actionGet(); @@ -1215,7 +1257,7 @@ public void testDisallowExpensiveQueries() throws IOException { .addMapping("_doc", "id", "type=keyword", "field1", "type=keyword", "query", "type=percolator") ); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("query", matchQuery("field1", "value")).endObject()) .get(); @@ -1264,13 +1306,13 @@ public void testWrappedWithConstantScore() throws Exception { assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", "d", "type=date", "q", "type=percolator")); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("1") .setSource(jsonBuilder().startObject().field("q", boolQuery().must(rangeQuery("d").gt("now"))).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("2") .setSource(jsonBuilder().startObject().field("q", boolQuery().must(rangeQuery("d").lt("now"))).endObject()) .execute() diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java index 5f11feee8f441..12be15552652c 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java @@ -110,20 +110,14 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws docType, new CompressedXContent( Strings.toString( - PutMappingRequest.buildFromSimplifiedDef( - docType, - queryField, - "type=percolator", - aliasField, - "type=alias,path=" + queryField - ) + PutMappingRequest.buildFromSimplifiedDef(queryField, "type=percolator", aliasField, "type=alias,path=" + queryField) ) ), MapperService.MergeReason.MAPPING_UPDATE ); mapperService.merge( docType, - new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType, TEXT_FIELD_NAME, "type=text"))), + new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(TEXT_FIELD_NAME, "type=text"))), MapperService.MergeReason.MAPPING_UPDATE ); } diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java index a25ab9a2bb76f..5038e72e9be5e 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java @@ -50,7 +50,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws super.initializeAdditionalMappings(mapperService); mapperService.merge( "_doc", - new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("_doc", "some_nested_object", "type=nested"))), + new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("some_nested_object", "type=nested"))), MapperService.MergeReason.MAPPING_UPDATE ); } diff --git 
a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java index 027067563103c..42826d2a368ad 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java @@ -634,7 +634,7 @@ public void testStoringQueries() throws Exception { public void testQueryWithRewrite() throws Exception { addQueryFieldMappings(); - client().prepareIndex("remote", "doc", "1").setSource("field", "value").get(); + client().prepareIndex("remote").setId("1").setSource("field", "value").get(); QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "1", "field")); ParsedDocument doc = mapperService.documentMapper("doc") .parse( diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java index fa7727d46ce40..e212ad6630e9a 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java @@ -97,7 +97,8 @@ protected Map, Object>> pluginScripts() { public void testPercolateScriptQuery() throws IOException { client().admin().indices().prepareCreate("index").addMapping("type", "query", "type=percolator").get(); - client().prepareIndex("index", "type", "1") + client().prepareIndex("index") + .setId("1") .setSource( jsonBuilder().startObject() .field( @@ -151,7 +152,8 @@ public void testPercolateQueryWithNestedDocuments_doNotLeakBitsetCacheEntries() .setSettings(Settings.builder().put(BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), false)) .addMapping("employee", mapping) ); - client().prepareIndex("test", "employee", "q1") + client().prepareIndex("test") + .setId("q1") .setSource( jsonBuilder().startObject() .field( @@ -238,7 +240,8 @@ public void testPercolateQueryWithNestedDocuments_doLeakFieldDataCacheEntries() mapping.endObject(); createIndex("test", client().admin().indices().prepareCreate("test").addMapping("employee", mapping)); Script script = new Script(ScriptType.INLINE, MockScriptPlugin.NAME, "use_fielddata_please", Collections.emptyMap()); - client().prepareIndex("test", "employee", "q1") + client().prepareIndex("test") + .setId("q1") .setSource( jsonBuilder().startObject() .field("query", QueryBuilders.nestedQuery("employees", QueryBuilders.scriptQuery(script), ScoreMode.Avg)) @@ -279,7 +282,8 @@ public void testPercolateQueryWithNestedDocuments_doLeakFieldDataCacheEntries() public void testMapUnmappedFieldAsText() throws IOException { Settings.Builder settings = Settings.builder().put("index.percolator.map_unmapped_fields_as_text", true); createIndex("test", settings.build(), "query", "query", "type=percolator"); - client().prepareIndex("test", "query", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "value")).endObject()) .get(); client().admin().indices().prepareRefresh().get(); @@ -310,10 +314,12 @@ public void testRangeQueriesWithNow() throws Exception { "type=percolator" ); - client().prepareIndex("test", "_doc", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("query", 
rangeQuery("field2").from("now-1h").to("now+1h")).endObject()) .get(); - client().prepareIndex("test", "_doc", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field( @@ -325,7 +331,8 @@ public void testRangeQueriesWithNow() throws Exception { .get(); Script script = new Script(ScriptType.INLINE, MockScriptPlugin.NAME, "1==1", Collections.emptyMap()); - client().prepareIndex("test", "_doc", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("query", boolQuery().filter(scriptQuery(script)).filter(rangeQuery("field2").from("now-1h").to("now+1h"))) diff --git a/modules/percolator/src/test/resources/rest-api-spec/test/11_basic_with_types.yml b/modules/percolator/src/test/resources/rest-api-spec/test/11_basic_with_types.yml deleted file mode 100644 index 896d2d514bcb9..0000000000000 --- a/modules/percolator/src/test/resources/rest-api-spec/test/11_basic_with_types.yml +++ /dev/null @@ -1,96 +0,0 @@ ---- -"Test percolator basics via rest": - - - do: - indices.create: - include_type_name: true - index: queries_index - body: - mappings: - queries_type: - properties: - query: - type: percolator - foo: - type: keyword - - - do: - indices.create: - include_type_name: true - index: documents_index - body: - mappings: - documents_type: - properties: - foo: - type: keyword - - - do: - index: - index: queries_index - type: queries_type - id: test_percolator - body: - query: - match_all: {} - - - do: - index: - index: documents_index - type: documents_type - id: some_id - body: - foo: bar - - - do: - indices.refresh: {} - - - do: - search: - rest_total_hits_as_int: true - body: - - query: - percolate: - field: query - document: - document_type: queries_type - foo: bar - - match: { hits.total: 1 } - - - do: - msearch: - rest_total_hits_as_int: true - body: - - index: queries_index - - query: - percolate: - field: query - document_type: queries_type - document: - foo: bar - - match: { responses.0.hits.total: 1 } - - - do: - search: - rest_total_hits_as_int: true - body: - - query: - percolate: - field: query - index: documents_index - type: documents_type - id: some_id - - match: { hits.total: 1 } - - - do: - msearch: - rest_total_hits_as_int: true - body: - - index: queries_index - - query: - percolate: - field: query - index: documents_index - type: documents_type - id: some_id - - match: { responses.0.hits.total: 1 } diff --git a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java index 78f0002fd4730..ea80b59711b8a 100644 --- a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.support.IndicesOptions; import org.opensearch.index.IndexNotFoundException; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.rankeval.PrecisionAtK.Detail; @@ -74,29 +73,18 @@ public void setup() { createIndex(TEST_INDEX); ensureGreen(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "1") + client().prepareIndex(TEST_INDEX) + .setId("1") .setSource("id", 1, "text", "berlin", "title", "Berlin, 
Germany", "population", 3670622) .get(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "2") - .setSource("id", 2, "text", "amsterdam", "population", 851573) - .get(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "3") - .setSource("id", 3, "text", "amsterdam", "population", 851573) - .get(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "4") - .setSource("id", 4, "text", "amsterdam", "population", 851573) - .get(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "5") - .setSource("id", 5, "text", "amsterdam", "population", 851573) - .get(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "6") - .setSource("id", 6, "text", "amsterdam", "population", 851573) - .get(); + client().prepareIndex(TEST_INDEX).setId("2").setSource("id", 2, "text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX).setId("3").setSource("id", 3, "text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX).setId("4").setSource("id", 4, "text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX).setId("5").setSource("id", 5, "text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX).setId("6").setSource("id", 6, "text", "amsterdam", "population", 851573).get(); // add another index for testing closed indices etc... - client().prepareIndex("test2", MapperService.SINGLE_MAPPING_NAME, "7") - .setSource("id", 7, "text", "amsterdam", "population", 851573) - .get(); + client().prepareIndex("test2").setId("7").setSource("id", 7, "text", "amsterdam", "population", 851573).get(); refresh(); // set up an alias that can also be used in tests diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 6639e799cdd5d..37526a924da73 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -49,8 +49,8 @@ testClusters.all { // Modules who's integration is explicitly tested in integration tests module ':modules:parent-join' module ':modules:lang-painless' - // Whitelist reindexing from the local node so we can test reindex-from-remote. - setting 'reindex.remote.whitelist', '127.0.0.1:*' + // Allowlist reindexing from the local node so we can test reindex-from-remote. 
+ setting 'reindex.remote.allowlist', '127.0.0.1:*' } test { diff --git a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java index 827afdeb7ad86..08bc18442b760 100644 --- a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java @@ -114,7 +114,7 @@ public void testUpdateByQuery() { Client client = client(); client.admin().indices().prepareCreate("foo").get(); client.admin().indices().prepareCreate("bar").get(); - client.admin().indices().preparePutMapping(INDEX_NAME).setType("_doc").setSource("cat", "type=keyword").get(); + client.admin().indices().preparePutMapping(INDEX_NAME).setSource("cat", "type=keyword").get(); { // tag::update-by-query UpdateByQueryRequestBuilder updateByQuery = @@ -302,7 +302,7 @@ private ReindexRequestBuilder reindexAndPartiallyBlock() throws Exception { false, true, IntStream.range(0, numDocs) - .mapToObj(i -> client().prepareIndex(INDEX_NAME, "_doc", Integer.toString(i)).setSource("n", Integer.toString(i))) + .mapToObj(i -> client().prepareIndex(INDEX_NAME).setId(Integer.toString(i)).setSource("n", Integer.toString(i))) .collect(Collectors.toList()) ); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java index 04619efb43c6c..865ae26f6f54d 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java @@ -133,6 +133,7 @@ public Collection createComponents( public List> getSettings() { final List> settings = new ArrayList<>(); settings.add(TransportReindexAction.REMOTE_CLUSTER_WHITELIST); + settings.add(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST); settings.addAll(ReindexSslConfig.getSettings()); return settings; } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java index 671827b0164a9..71c3aad8713e1 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java @@ -59,7 +59,7 @@ class ReindexValidator { static final String SORT_DEPRECATED_MESSAGE = "The sort option in reindex is deprecated. 
" + "Instead consider using query filtering to find the desired subset of data."; - private final CharacterRunAutomaton remoteWhitelist; + private final CharacterRunAutomaton remoteAllowlist; private final ClusterService clusterService; private final IndexNameExpressionResolver resolver; private final AutoCreateIndex autoCreateIndex; @@ -70,14 +70,14 @@ class ReindexValidator { IndexNameExpressionResolver resolver, AutoCreateIndex autoCreateIndex ) { - this.remoteWhitelist = buildRemoteWhitelist(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(settings)); + this.remoteAllowlist = buildRemoteAllowlist(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings)); this.clusterService = clusterService; this.resolver = resolver; this.autoCreateIndex = autoCreateIndex; } void initialValidation(ReindexRequest request) { - checkRemoteWhitelist(remoteWhitelist, request.getRemoteInfo()); + checkRemoteAllowlist(remoteAllowlist, request.getRemoteInfo()); ClusterState state = clusterService.state(); validateAgainstAliases( request.getSearchRequest(), @@ -93,32 +93,32 @@ void initialValidation(ReindexRequest request) { } } - static void checkRemoteWhitelist(CharacterRunAutomaton whitelist, RemoteInfo remoteInfo) { + static void checkRemoteAllowlist(CharacterRunAutomaton allowlist, RemoteInfo remoteInfo) { if (remoteInfo == null) { return; } String check = remoteInfo.getHost() + ':' + remoteInfo.getPort(); - if (whitelist.run(check)) { + if (allowlist.run(check)) { return; } - String whiteListKey = TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(); - throw new IllegalArgumentException('[' + check + "] not whitelisted in " + whiteListKey); + String allowListKey = TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.getKey(); + throw new IllegalArgumentException('[' + check + "] not allowlisted in " + allowListKey); } /** - * Build the {@link CharacterRunAutomaton} that represents the reindex-from-remote whitelist and make sure that it doesn't whitelist + * Build the {@link CharacterRunAutomaton} that represents the reindex-from-remote allowlist and make sure that it doesn't allowlist * the world. */ - static CharacterRunAutomaton buildRemoteWhitelist(List whitelist) { - if (whitelist.isEmpty()) { + static CharacterRunAutomaton buildRemoteAllowlist(List allowlist) { + if (allowlist.isEmpty()) { return new CharacterRunAutomaton(Automata.makeEmpty()); } - Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY)); + Automaton automaton = Regex.simpleMatchToAutomaton(allowlist.toArray(Strings.EMPTY_ARRAY)); automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (Operations.isTotal(automaton)) { throw new IllegalArgumentException( - "Refusing to start because whitelist " - + whitelist + "Refusing to start because allowlist " + + allowlist + " accepts all addresses. " + "This would allow users to reindex-from-remote any URL they like effectively having OpenSearch make HTTP GETs " + "for them." 
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java index aea72e694a637..6f2e5d8e71edb 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java @@ -54,7 +54,7 @@ public RestDeleteByQueryAction() { @Override public List routes() { - return unmodifiableList(asList(new Route(POST, "/{index}/_delete_by_query"), new Route(POST, "/{index}/{type}/_delete_by_query"))); + return unmodifiableList(asList(new Route(POST, "/{index}/_delete_by_query"))); } @Override diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java index d38cb47fc8398..9be1687a09432 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java @@ -55,7 +55,7 @@ public RestUpdateByQueryAction() { @Override public List routes() { - return unmodifiableList(asList(new Route(POST, "/{index}/_update_by_query"), new Route(POST, "/{index}/{type}/_update_by_query"))); + return unmodifiableList(asList(new Route(POST, "/{index}/_update_by_query"))); } @Override diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java index a24c2b002b759..c84d103a2ef6f 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java @@ -56,10 +56,19 @@ import static java.util.Collections.emptyList; public class TransportReindexAction extends HandledTransportAction { - public static final Setting> REMOTE_CLUSTER_WHITELIST = Setting.listSetting( + static final Setting> REMOTE_CLUSTER_WHITELIST = Setting.listSetting( "reindex.remote.whitelist", emptyList(), Function.identity(), + Property.NodeScope, + Property.Deprecated + ); + // The setting below is going to replace the one above. + // To keep backwards compatibility, the old setting is retained and is also used as the fallback for the new one.
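As context for the definition that follows: the new `reindex.remote.allowlist` key is declared with the deprecated `reindex.remote.whitelist` key as its fallback, so existing configurations keep working while emitting a deprecation warning. A minimal sketch of the resulting behavior, using only calls that appear elsewhere in this patch (the wrapping class and method are illustrative assumptions):

```java
import org.opensearch.common.settings.Settings;
import org.opensearch.index.reindex.TransportReindexAction;

import java.util.List;

class AllowlistFallbackSketch {

    static List<String> resolve() {
        // A node that still configures only the old, deprecated key.
        Settings settings = Settings.builder().put("reindex.remote.whitelist", "127.0.0.1:*").build();

        // The new setting falls back to the old key, so this resolves to ["127.0.0.1:*"];
        // ReindexRenamedSettingTests, added later in this patch, asserts exactly this
        // along with the deprecation warning for the old key.
        return TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings);
    }
}
```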
+ public static final Setting> REMOTE_CLUSTER_ALLOWLIST = Setting.listSetting( + "reindex.remote.allowlist", + REMOTE_CLUSTER_WHITELIST, + Function.identity(), Property.NodeScope ); public static Optional remoteExtension = Optional.empty(); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java index 870f3620062f7..21bbb02fb147c 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java @@ -74,13 +74,13 @@ protected Collection> nodePlugins() { public void testBasics() throws Exception { indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c"), - client().prepareIndex("test", "test", "5").setSource("foo", "d"), - client().prepareIndex("test", "test", "6").setSource("foo", "e"), - client().prepareIndex("test", "test", "7").setSource("foo", "f") + client().prepareIndex("test").setId("1").setSource("foo", "a"), + client().prepareIndex("test").setId("2").setSource("foo", "a"), + client().prepareIndex("test").setId("3").setSource("foo", "b"), + client().prepareIndex("test").setId("4").setSource("foo", "c"), + client().prepareIndex("test").setId("5").setSource("foo", "d"), + client().prepareIndex("test").setId("6").setSource("foo", "e"), + client().prepareIndex("test").setId("7").setSource("foo", "f") ); assertHitCount(client().prepareSearch("test").setSize(0).get(), 7); @@ -109,7 +109,7 @@ public void testDeleteByQueryWithOneIndex() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { - builders.add(client().prepareIndex("test", "doc", String.valueOf(i)).setSource("fields1", 1)); + builders.add(client().prepareIndex("test").setId(String.valueOf(i)).setSource("fields1", 1)); } indexRandom(true, true, true, builders); @@ -134,7 +134,7 @@ public void testDeleteByQueryWithMultipleIndices() throws Exception { for (int j = 0; j < docs; j++) { boolean candidate = (j < candidates[i]); - builders.add(client().prepareIndex("test-" + i, "doc", String.valueOf(j)).setSource("candidate", candidate)); + builders.add(client().prepareIndex("test-" + i).setId(String.valueOf(j)).setSource("candidate", candidate)); } } indexRandom(true, true, true, builders); @@ -151,7 +151,7 @@ public void testDeleteByQueryWithMultipleIndices() throws Exception { } public void testDeleteByQueryWithMissingIndex() throws Exception { - indexRandom(true, client().prepareIndex("test", "test", "1").setSource("foo", "a")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("foo", "a")); assertHitCount(client().prepareSearch().setSize(0).get(), 1); try { @@ -171,7 +171,7 @@ public void testDeleteByQueryWithRouting() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { - builders.add(client().prepareIndex("test", "test", String.valueOf(i)).setRouting(String.valueOf(i)).setSource("field1", 1)); + builders.add(client().prepareIndex("test").setId(String.valueOf(i)).setRouting(String.valueOf(i)).setSource("field1", 1)); } indexRandom(true, true, true, builders); @@ -199,7 +199,8 @@ public void testDeleteByMatchQuery() throws Exception { List builders = new 
ArrayList<>(); for (int i = 0; i < docs; i++) { builders.add( - client().prepareIndex("test", "test", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setRouting(randomAlphaOfLengthBetween(1, 5)) .setSource("foo", "bar") ); @@ -217,7 +218,7 @@ public void testDeleteByMatchQuery() throws Exception { } public void testDeleteByQueryWithDateMath() throws Exception { - indexRandom(true, client().prepareIndex("test", "type", "1").setSource("d", "2013-01-01")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("d", "2013-01-01")); DeleteByQueryRequestBuilder delete = deleteByQuery().source("test").filter(rangeQuery("d").to("now-1h")); assertThat(delete.refresh(true).get(), matcher().deleted(1L)); @@ -231,7 +232,7 @@ public void testDeleteByQueryOnReadOnlyIndex() throws Exception { final int docs = randomIntBetween(1, 50); List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { - builders.add(client().prepareIndex("test", "test", Integer.toString(i)).setSource("field", 1)); + builders.add(client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", 1)); } indexRandom(true, true, true, builders); @@ -254,7 +255,7 @@ public void testDeleteByQueryOnReadOnlyAllowDeleteIndex() throws Exception { final int docs = randomIntBetween(1, 50); List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { - builders.add(client().prepareIndex("test", "test").setId(Integer.toString(i)).setSource("field", 1)); + builders.add(client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", 1)); } indexRandom(true, true, true, builders); @@ -311,13 +312,13 @@ public void testDeleteByQueryOnReadOnlyAllowDeleteIndex() throws Exception { public void testSlices() throws Exception { indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c"), - client().prepareIndex("test", "test", "5").setSource("foo", "d"), - client().prepareIndex("test", "test", "6").setSource("foo", "e"), - client().prepareIndex("test", "test", "7").setSource("foo", "f") + client().prepareIndex("test").setId("1").setSource("foo", "a"), + client().prepareIndex("test").setId("2").setSource("foo", "a"), + client().prepareIndex("test").setId("3").setSource("foo", "b"), + client().prepareIndex("test").setId("4").setSource("foo", "c"), + client().prepareIndex("test").setId("5").setSource("foo", "d"), + client().prepareIndex("test").setId("6").setSource("foo", "e"), + client().prepareIndex("test").setId("7").setSource("foo", "f") ); assertHitCount(client().prepareSearch("test").setSize(0).get(), 7); @@ -348,7 +349,7 @@ public void testMultipleSources() throws Exception { docs.put(indexName, new ArrayList<>()); int numDocs = between(5, 15); for (int i = 0; i < numDocs; i++) { - docs.get(indexName).add(client().prepareIndex(indexName, "test", Integer.toString(i)).setSource("foo", "a")); + docs.get(indexName).add(client().prepareIndex(indexName).setId(Integer.toString(i)).setSource("foo", "a")); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java index 13101cdf59461..ff765ea0d79e8 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java 
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java @@ -55,7 +55,7 @@ public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Throwable { List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { for (int t = 0; t < threads.length; t++) { - builders.add(client().prepareIndex("test", "doc").setSource("field", t)); + builders.add(client().prepareIndex("test").setSource("field", t)); } } indexRandom(true, true, true, builders); @@ -96,7 +96,7 @@ public void testConcurrentDeleteByQueriesOnSameDocs() throws Throwable { List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { - builders.add(client().prepareIndex("test", "doc", String.valueOf(i)).setSource("foo", "bar")); + builders.add(client().prepareIndex("test").setId(String.valueOf(i)).setSource("foo", "bar")); } indexRandom(true, true, true, builders); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java index 652e4d4d34fd5..0c660e5df9682 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java @@ -51,10 +51,10 @@ public class ReindexBasicTests extends ReindexTestCase { public void testFiltering() throws Exception { indexRandom( true, - client().prepareIndex("source", "test", "1").setSource("foo", "a"), - client().prepareIndex("source", "test", "2").setSource("foo", "a"), - client().prepareIndex("source", "test", "3").setSource("foo", "b"), - client().prepareIndex("source", "test", "4").setSource("foo", "c") + client().prepareIndex("source").setId("1").setSource("foo", "a"), + client().prepareIndex("source").setId("2").setSource("foo", "a"), + client().prepareIndex("source").setId("3").setSource("foo", "b"), + client().prepareIndex("source").setId("4").setSource("foo", "c") ); assertHitCount(client().prepareSearch("source").setSize(0).get(), 4); @@ -84,7 +84,7 @@ public void testCopyMany() throws Exception { List docs = new ArrayList<>(); int max = between(150, 500); for (int i = 0; i < max; i++) { - docs.add(client().prepareIndex("source", "test", Integer.toString(i)).setSource("foo", "a")); + docs.add(client().prepareIndex("source").setId(Integer.toString(i)).setSource("foo", "a")); } indexRandom(true, docs); @@ -111,7 +111,7 @@ public void testCopyManyWithSlices() throws Exception { List docs = new ArrayList<>(); int max = between(150, 500); for (int i = 0; i < max; i++) { - docs.add(client().prepareIndex("source", "test", Integer.toString(i)).setSource("foo", "a")); + docs.add(client().prepareIndex("source").setId(Integer.toString(i)).setSource("foo", "a")); } indexRandom(true, docs); @@ -148,7 +148,7 @@ public void testMultipleSources() throws Exception { docs.put(indexName, new ArrayList<>()); int numDocs = between(50, 200); for (int i = 0; i < numDocs; i++) { - docs.get(indexName).add(client().prepareIndex(indexName, typeName, "id_" + sourceIndex + "_" + i).setSource("foo", "a")); + docs.get(indexName).add(client().prepareIndex(indexName).setId("id_" + sourceIndex + "_" + i).setSource("foo", "a")); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java index db9c2779928ea..c893c5c5b180f 100644 --- 
a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java @@ -56,7 +56,7 @@ public void testFailuresCauseAbortDefault() throws Exception { * Create the destination index such that the copy will cause a mapping * conflict on every request. */ - indexRandom(true, client().prepareIndex("dest", "_doc", "test").setSource("test", 10) /* Its a string in the source! */); + indexRandom(true, client().prepareIndex("dest").setId("test").setSource("test", 10) /* Its a string in the source! */); indexDocs(100); @@ -77,7 +77,7 @@ public void testFailuresCauseAbortDefault() throws Exception { public void testAbortOnVersionConflict() throws Exception { // Just put something in the way of the copy. - indexRandom(true, client().prepareIndex("dest", "_doc", "1").setSource("test", "test")); + indexRandom(true, client().prepareIndex("dest").setId("1").setSource("test", "test")); indexDocs(100); @@ -139,7 +139,7 @@ public void testResponseOnSearchFailure() throws Exception { private void indexDocs(int count) throws Exception { List docs = new ArrayList<>(count); for (int i = 0; i < count; i++) { - docs.add(client().prepareIndex("source", "_doc", Integer.toString(i)).setSource("test", "words words")); + docs.add(client().prepareIndex("source").setId(Integer.toString(i)).setSource("test", "words words")); } indexRandom(true, docs); } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java index e083b877236aa..8012b67253cb6 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java @@ -44,22 +44,22 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; -import static org.opensearch.index.reindex.ReindexValidator.buildRemoteWhitelist; -import static org.opensearch.index.reindex.ReindexValidator.checkRemoteWhitelist; +import static org.opensearch.index.reindex.ReindexValidator.buildRemoteAllowlist; +import static org.opensearch.index.reindex.ReindexValidator.checkRemoteAllowlist; /** - * Tests the reindex-from-remote whitelist of remotes. + * Tests the reindex-from-remote allowlist of remotes. 
*/ public class ReindexFromRemoteWhitelistTests extends OpenSearchTestCase { private final BytesReference query = new BytesArray("{ \"foo\" : \"bar\" }"); public void testLocalRequestWithoutWhitelist() { - checkRemoteWhitelist(buildRemoteWhitelist(emptyList()), null); + checkRemoteAllowlist(buildRemoteAllowlist(emptyList()), null); } public void testLocalRequestWithWhitelist() { - checkRemoteWhitelist(buildRemoteWhitelist(randomWhitelist()), null); + checkRemoteAllowlist(buildRemoteAllowlist(randomAllowlist()), null); } /** @@ -81,16 +81,16 @@ private RemoteInfo newRemoteInfo(String host, int port) { } public void testWhitelistedRemote() { - List whitelist = randomWhitelist(); - String[] inList = whitelist.iterator().next().split(":"); + List allowlist = randomAllowlist(); + String[] inList = allowlist.iterator().next().split(":"); String host = inList[0]; int port = Integer.valueOf(inList[1]); - checkRemoteWhitelist(buildRemoteWhitelist(whitelist), newRemoteInfo(host, port)); + checkRemoteAllowlist(buildRemoteAllowlist(allowlist), newRemoteInfo(host, port)); } public void testWhitelistedByPrefix() { - checkRemoteWhitelist( - buildRemoteWhitelist(singletonList("*.example.com:9200")), + checkRemoteAllowlist( + buildRemoteAllowlist(singletonList("*.example.com:9200")), new RemoteInfo( randomAlphaOfLength(5), "es.example.com", @@ -104,34 +104,34 @@ public void testWhitelistedByPrefix() { RemoteInfo.DEFAULT_CONNECT_TIMEOUT ) ); - checkRemoteWhitelist( - buildRemoteWhitelist(singletonList("*.example.com:9200")), + checkRemoteAllowlist( + buildRemoteAllowlist(singletonList("*.example.com:9200")), newRemoteInfo("6e134134a1.us-east-1.aws.example.com", 9200) ); } public void testWhitelistedBySuffix() { - checkRemoteWhitelist(buildRemoteWhitelist(singletonList("es.example.com:*")), newRemoteInfo("es.example.com", 9200)); + checkRemoteAllowlist(buildRemoteAllowlist(singletonList("es.example.com:*")), newRemoteInfo("es.example.com", 9200)); } public void testWhitelistedByInfix() { - checkRemoteWhitelist(buildRemoteWhitelist(singletonList("es*.example.com:9200")), newRemoteInfo("es1.example.com", 9200)); + checkRemoteAllowlist(buildRemoteAllowlist(singletonList("es*.example.com:9200")), newRemoteInfo("es1.example.com", 9200)); } public void testLoopbackInWhitelistRemote() throws UnknownHostException { - List whitelist = randomWhitelist(); - whitelist.add("127.0.0.1:*"); - checkRemoteWhitelist(buildRemoteWhitelist(whitelist), newRemoteInfo("127.0.0.1", 9200)); + List allowlist = randomAllowlist(); + allowlist.add("127.0.0.1:*"); + checkRemoteAllowlist(buildRemoteAllowlist(allowlist), newRemoteInfo("127.0.0.1", 9200)); } public void testUnwhitelistedRemote() { int port = between(1, Integer.MAX_VALUE); - List whitelist = randomBoolean() ? randomWhitelist() : emptyList(); + List allowlist = randomBoolean() ? 
randomAllowlist() : emptyList(); Exception e = expectThrows( IllegalArgumentException.class, - () -> checkRemoteWhitelist(buildRemoteWhitelist(whitelist), newRemoteInfo("not in list", port)) + () -> checkRemoteAllowlist(buildRemoteAllowlist(allowlist), newRemoteInfo("not in list", port)) ); - assertEquals("[not in list:" + port + "] not whitelisted in reindex.remote.whitelist", e.getMessage()); + assertEquals("[not in list:" + port + "] not allowlisted in reindex.remote.allowlist", e.getMessage()); } public void testRejectMatchAll() { @@ -140,22 +140,22 @@ public void testRejectMatchAll() { assertMatchesTooMuch(singletonList("***")); assertMatchesTooMuch(Arrays.asList("realstuff", "*")); assertMatchesTooMuch(Arrays.asList("*", "realstuff")); - List random = randomWhitelist(); + List random = randomAllowlist(); random.add("*"); assertMatchesTooMuch(random); } public void testIPv6Address() { - List whitelist = randomWhitelist(); - whitelist.add("[::1]:*"); - checkRemoteWhitelist(buildRemoteWhitelist(whitelist), newRemoteInfo("[::1]", 9200)); + List allowlist = randomAllowlist(); + allowlist.add("[::1]:*"); + checkRemoteAllowlist(buildRemoteAllowlist(allowlist), newRemoteInfo("[::1]", 9200)); } - private void assertMatchesTooMuch(List whitelist) { - Exception e = expectThrows(IllegalArgumentException.class, () -> buildRemoteWhitelist(whitelist)); + private void assertMatchesTooMuch(List allowlist) { + Exception e = expectThrows(IllegalArgumentException.class, () -> buildRemoteAllowlist(allowlist)); assertEquals( - "Refusing to start because whitelist " - + whitelist + "Refusing to start because allowlist " + + allowlist + " accepts all addresses. " + "This would allow users to reindex-from-remote any URL they like effectively having OpenSearch make HTTP GETs " + "for them.", @@ -163,12 +163,12 @@ private void assertMatchesTooMuch(List whitelist) { ); } - private List randomWhitelist() { + private List randomAllowlist() { int size = between(1, 100); - List whitelist = new ArrayList<>(size); + List allowlist = new ArrayList<>(size); for (int i = 0; i < size; i++) { - whitelist.add(randomAlphaOfLength(5) + ':' + between(1, Integer.MAX_VALUE)); + allowlist.add(randomAlphaOfLength(5) + ':' + between(1, Integer.MAX_VALUE)); } - return whitelist; + return allowlist; } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java index a44880c645d34..8ce850a936557 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java @@ -98,15 +98,15 @@ protected boolean addMockHttpTransport() { @Override protected Settings nodeSettings() { Settings.Builder settings = Settings.builder().put(super.nodeSettings()); - // Whitelist reindexing from the http host we're going to use - settings.put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "127.0.0.1:*"); + // Allowlist reindexing from the http host we're going to use + settings.put(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.getKey(), "127.0.0.1:*"); settings.put(NetworkModule.HTTP_TYPE_KEY, Netty4Plugin.NETTY_HTTP_TRANSPORT_NAME); return settings.build(); } @Before public void setupSourceIndex() { - client().prepareIndex("source", "test").setSource("test", "test").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + 
client().prepareIndex("source").setSource("test", "test").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); } @Before diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRenamedSettingTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRenamedSettingTests.java new file mode 100644 index 0000000000000..8ff84223d371e --- /dev/null +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRenamedSettingTests.java @@ -0,0 +1,83 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.reindex; + +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.Arrays; +import java.util.List; + +/** + * A unit test to validate the former name of the setting 'reindex.remote.allowlist' still take effect, + * after it is deprecated, so that the backwards compatibility is maintained. + * The test can be removed along with removing support of the deprecated setting. + */ +public class ReindexRenamedSettingTests extends OpenSearchTestCase { + private final ReindexPlugin plugin = new ReindexPlugin(); + + /** + * Validate the both settings are known and supported. + */ + public void testReindexSettingsExist() { + List> settings = plugin.getSettings(); + assertTrue( + "Both 'reindex.remote.allowlist' and its predecessor should be supported settings of Reindex plugin", + settings.containsAll( + Arrays.asList(TransportReindexAction.REMOTE_CLUSTER_WHITELIST, TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST) + ) + ); + } + + /** + * Validate the default value of the both settings is the same. + */ + public void testSettingFallback() { + assertEquals( + TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(Settings.EMPTY), + TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(Settings.EMPTY) + ); + } + + /** + * Validate the new setting can be configured correctly, and it doesn't impact the old setting. + */ + public void testSettingGetValue() { + Settings settings = Settings.builder().put("reindex.remote.allowlist", "127.0.0.1:*").build(); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings), Arrays.asList("127.0.0.1:*")); + assertEquals( + TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(settings), + TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getDefault(Settings.EMPTY) + ); + } + + /** + * Validate the value of the old setting will be applied to the new setting, if the new setting is not configured. + */ + public void testSettingGetValueWithFallback() { + Settings settings = Settings.builder().put("reindex.remote.whitelist", "127.0.0.1:*").build(); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings), Arrays.asList("127.0.0.1:*")); + assertSettingDeprecationsAndWarnings(new Setting[] { TransportReindexAction.REMOTE_CLUSTER_WHITELIST }); + } + + /** + * Validate the value of the old setting will be ignored, if the new setting is configured. 
+ */ + public void testSettingGetValueWhenBothAreConfigured() { + Settings settings = Settings.builder() + .put("reindex.remote.allowlist", "127.0.0.1:*") + .put("reindex.remote.whitelist", "[::1]:*, 127.0.0.1:*") + .build(); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings), Arrays.asList("127.0.0.1:*")); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(settings), Arrays.asList("[::1]:*", "127.0.0.1:*")); + assertSettingDeprecationsAndWarnings(new Setting[] { TransportReindexAction.REMOTE_CLUSTER_WHITELIST }); + } + +} diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java index 8d675916437e9..8ce9cf74bb8be 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java @@ -52,7 +52,7 @@ protected Collection> getPlugins() { public void testDeprecatedSort() { int max = between(2, 20); for (int i = 0; i < max; i++) { - client().prepareIndex("source", "_doc").setId(Integer.toString(i)).setSource("foo", i).get(); + client().prepareIndex("source").setId(Integer.toString(i)).setSource("foo", i).get(); } client().admin().indices().prepareRefresh("source").get(); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java index e516be131e6a4..733ccc6b61127 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java @@ -127,7 +127,7 @@ private BulkByScrollResponse reindexCreate() { private void setupSourceAbsent() throws Exception { indexRandom( true, - client().prepareIndex("source", "_doc", "test").setVersionType(EXTERNAL).setVersion(SOURCE_VERSION).setSource("foo", "source") + client().prepareIndex("source").setId("test").setVersionType(EXTERNAL).setVersion(SOURCE_VERSION).setSource("foo", "source") ); assertEquals(SOURCE_VERSION, client().prepareGet("source", "test").get().getVersion()); @@ -137,7 +137,7 @@ private void setupDest(int version) throws Exception { setupSourceAbsent(); indexRandom( true, - client().prepareIndex("dest", "_doc", "test").setVersionType(EXTERNAL).setVersion(version).setSource("foo", "dest") + client().prepareIndex("dest").setId("test").setVersionType(EXTERNAL).setVersion(version).setSource("foo", "dest") ); assertEquals(version, client().prepareGet("dest", "test").get().getVersion()); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java deleted file mode 100644 index 11e1f6b478fe3..0000000000000 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.index.reindex; - -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.io.IOException; - -import static java.util.Collections.emptyList; - -public class RestDeleteByQueryActionTests extends RestActionTestCase { - private RestDeleteByQueryAction action; - - @Before - public void setUpAction() { - action = new RestDeleteByQueryAction(); - controller().registerHandler(action); - } - - public void testParseEmpty() throws IOException { - final FakeRestRequest restRequest = new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build(); - DeleteByQueryRequest request = action.buildRequest(restRequest, DEFAULT_NAMED_WRITABLE_REGISTRY); - assertEquals(AbstractBulkByScrollRequest.SIZE_ALL_MATCHES, request.getSize()); - assertEquals(AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE, request.getSearchRequest().source().size()); - } -} diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java deleted file mode 100644 index ef5a94f2e1798..0000000000000 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.index.reindex; - -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.io.IOException; - -import static java.util.Collections.emptyList; - -public class RestUpdateByQueryActionTests extends RestActionTestCase { - - private RestUpdateByQueryAction action; - - @Before - public void setUpAction() { - action = new RestUpdateByQueryAction(); - controller().registerHandler(action); - } - - public void testParseEmpty() throws IOException { - final FakeRestRequest restRequest = new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build(); - UpdateByQueryRequest request = action.buildRequest(restRequest, DEFAULT_NAMED_WRITABLE_REGISTRY); - assertEquals(AbstractBulkByScrollRequest.SIZE_ALL_MATCHES, request.getSize()); - assertEquals(AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE, request.getSearchRequest().source().size()); - } -} diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java index 3f46d621ab8d5..6bedd59515e45 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java @@ -99,7 +99,7 @@ private void testCase(AbstractBulkByScrollRequestBuilder request, String a List docs = new ArrayList<>(); for (int i = 0; i < numSlices * 10; i++) { - docs.add(client().prepareIndex("test", "test", Integer.toString(i)).setSource("foo", "bar")); + docs.add(client().prepareIndex("test").setId(Integer.toString(i)).setSource("foo", "bar")); } indexRandom(true, docs); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java index 546f9b07e90b7..124670dba9510 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java @@ -102,8 +102,8 @@ protected boolean addMockHttpTransport() { final Settings nodeSettings() { return Settings.builder() - // whitelist reindexing from the HTTP host we're going to use - .put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "127.0.0.1:*") + // allowlist reindexing from the HTTP host we're going to use + .put(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.getKey(), "127.0.0.1:*") .build(); } @@ -198,7 +198,7 @@ private void testCase( // Build the test data. Don't use indexRandom because that won't work consistently with such small thread pools. 
BulkRequestBuilder bulk = client().prepareBulk(); for (int i = 0; i < DOC_COUNT; i++) { - bulk.add(client().prepareIndex("source", "test").setSource("foo", "bar " + i)); + bulk.add(client().prepareIndex("source").setSource("foo", "bar " + i)); } Retry retry = new Retry(BackoffPolicy.exponentialBackoff(), client().threadPool()); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java index 3ed1f7b563546..4f48b99dccdd4 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java @@ -50,10 +50,10 @@ public class UpdateByQueryBasicTests extends ReindexTestCase { public void testBasics() throws Exception { indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c") + client().prepareIndex("test").setId("1").setSource("foo", "a"), + client().prepareIndex("test").setId("2").setSource("foo", "a"), + client().prepareIndex("test").setId("3").setSource("foo", "b"), + client().prepareIndex("test").setId("4").setSource("foo", "c") ); assertHitCount(client().prepareSearch("test").setSize(0).get(), 4); assertEquals(1, client().prepareGet("test", "1").get().getVersion()); @@ -90,10 +90,10 @@ public void testBasics() throws Exception { public void testSlices() throws Exception { indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c") + client().prepareIndex("test").setId("1").setSource("foo", "a"), + client().prepareIndex("test").setId("2").setSource("foo", "a"), + client().prepareIndex("test").setId("3").setSource("foo", "b"), + client().prepareIndex("test").setId("4").setSource("foo", "c") ); assertHitCount(client().prepareSearch("test").setSize(0).get(), 4); assertEquals(1, client().prepareGet("test", "1").get().getVersion()); @@ -138,7 +138,7 @@ public void testMultipleSources() throws Exception { docs.put(indexName, new ArrayList<>()); int numDocs = between(5, 15); for (int i = 0; i < numDocs; i++) { - docs.get(indexName).add(client().prepareIndex(indexName, "test", Integer.toString(i)).setSource("foo", "a")); + docs.get(indexName).add(client().prepareIndex(indexName).setId(Integer.toString(i)).setSource("foo", "a")); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java index 3e4c61432c34a..3c2e302cb85e7 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java @@ -55,7 +55,7 @@ public class UpdateByQueryWhileModifyingTests extends ReindexTestCase { public void testUpdateWhileReindexing() throws Exception { AtomicReference value = new AtomicReference<>(randomSimpleString(random())); - indexRandom(true, client().prepareIndex("test", "test", "test").setSource("test", 
value.get())); + indexRandom(true, client().prepareIndex("test").setId("test").setSource("test", value.get())); AtomicReference failure = new AtomicReference<>(); AtomicBoolean keepUpdating = new AtomicBoolean(true); @@ -79,7 +79,8 @@ public void testUpdateWhileReindexing() throws Exception { GetResponse get = client().prepareGet("test", "test").get(); assertEquals(value.get(), get.getSource().get("test")); value.set(randomSimpleString(random())); - IndexRequestBuilder index = client().prepareIndex("test", "test", "test") + IndexRequestBuilder index = client().prepareIndex("test") + .setId("test") .setSource("test", value.get()) .setRefreshPolicy(IMMEDIATE); /* diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml index 7086e048eba3e..15e2397099b65 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml @@ -306,9 +306,9 @@ index: dest --- -"unwhitelisted remote host fails": +"unallowlisted remote host fails": - do: - catch: /\[badremote:9200\] not whitelisted in reindex.remote.whitelist/ + catch: /\[badremote:9200\] not allowlisted in reindex.remote.allowlist/ reindex: body: source: diff --git a/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java b/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java index 041550b70a60b..837a30555e127 100644 --- a/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java +++ b/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java @@ -95,7 +95,7 @@ public class URLRepository extends BlobStoreRepository { private final List supportedProtocols; - private final URIPattern[] urlWhiteList; + private final URIPattern[] urlAllowList; private final Environment environment; @@ -120,7 +120,7 @@ public URLRepository( } this.environment = environment; supportedProtocols = SUPPORTED_PROTOCOLS_SETTING.get(environment.settings()); - urlWhiteList = ALLOWED_URLS_SETTING.get(environment.settings()).toArray(new URIPattern[] {}); + urlAllowList = ALLOWED_URLS_SETTING.get(environment.settings()).toArray(new URIPattern[] {}); basePath = BlobPath.cleanPath(); url = URL_SETTING.exists(metadata.settings()) ? 
URL_SETTING.get(metadata.settings()) @@ -161,7 +161,7 @@ private URL checkURL(URL url) { for (String supportedProtocol : supportedProtocols) { if (supportedProtocol.equals(protocol)) { try { - if (URIPattern.match(urlWhiteList, url.toURI())) { + if (URIPattern.match(urlAllowList, url.toURI())) { // URL matches white list - no additional processing is needed return url; } diff --git a/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java b/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java index 12120e365fe29..46b9c45b9cf82 100644 --- a/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java +++ b/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java @@ -93,8 +93,8 @@ public void testBasicUsage() throws Exception { // both values should collate to same value indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); // searching for either of the terms should return both results since they collate to the same value @@ -135,9 +135,10 @@ public void testMultipleValues() throws Exception { // everything should be indexed fine, no exceptions indexRandom( true, - client().prepareIndex(index, type, "1") + client().prepareIndex(index) + .setId("1") .setSource("{\"id\":\"1\", \"collate\":[\"" + equivalent[0] + "\", \"" + equivalent[1] + "\"]}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[2] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[2] + "\"}", XContentType.JSON) ); // using sort mode = max, values B and C will be used for the sort @@ -198,8 +199,8 @@ public void testNormalization() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); // searching for either of the terms should return both results since they collate to the same value @@ -244,8 +245,8 @@ public void testSecondaryStrength() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + 
client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) @@ -290,8 +291,8 @@ public void testIgnorePunctuation() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) @@ -336,9 +337,9 @@ public void testIgnoreWhitespace() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"foo bar\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"foobar\"}", XContentType.JSON), - client().prepareIndex(index, type, "3").setSource("{\"id\":\"3\",\"collate\":\"foo-bar\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"foo bar\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"foobar\"}", XContentType.JSON), + client().prepareIndex(index).setId("3").setSource("{\"id\":\"3\",\"collate\":\"foo-bar\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) @@ -379,8 +380,8 @@ public void testNumerics() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"foobar-10\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"foobar-9\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"collate\":\"foobar-10\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"collate\":\"foobar-9\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) @@ -419,10 +420,10 @@ public void testIgnoreAccentsButNotCase() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"résumé\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"Resume\"}", XContentType.JSON), - client().prepareIndex(index, type, "3").setSource("{\"id\":\"3\",\"collate\":\"resume\"}", XContentType.JSON), - client().prepareIndex(index, type, "4").setSource("{\"id\":\"4\",\"collate\":\"Résumé\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"résumé\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"Resume\"}", XContentType.JSON), + client().prepareIndex(index).setId("3").setSource("{\"id\":\"3\",\"collate\":\"resume\"}", XContentType.JSON), + client().prepareIndex(index).setId("4").setSource("{\"id\":\"4\",\"collate\":\"Résumé\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) @@ -458,8 +459,8 @@ public void testUpperCaseFirst() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"resume\"}", XContentType.JSON), - client().prepareIndex(index, type, 
"2").setSource("{\"collate\":\"Resume\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"collate\":\"resume\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"collate\":\"Resume\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) @@ -507,8 +508,8 @@ public void testCustomRules() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) diff --git a/plugins/analysis-ukrainian/build.gradle b/plugins/analysis-ukrainian/build.gradle index 60ab39997216c..9e4bb9c647859 100644 --- a/plugins/analysis-ukrainian/build.gradle +++ b/plugins/analysis-ukrainian/build.gradle @@ -37,7 +37,7 @@ opensearchplugin { dependencies { api "org.apache.lucene:lucene-analyzers-morfologik:${versions.lucene}" api "org.carrot2:morfologik-stemming:2.1.8" - api "org.carrot2:morfologik-fsa:2.1.1" + api "org.carrot2:morfologik-fsa:2.1.8" api "ua.net.nlp:morfologik-ukrainian-search:4.9.1" } diff --git a/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.1.jar.sha1 deleted file mode 100644 index 07d523ec0c82b..0000000000000 --- a/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -87866deba6aa5d19956fbe3406d8ddb5f19f5352 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.8.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.8.jar.sha1 new file mode 100644 index 0000000000000..0b81b8051a3ba --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.8.jar.sha1 @@ -0,0 +1 @@ +68e23e2c57fe5699d511b3a7a2f202f90020e214 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 7bb9250ea40a6..968f4efb3fa1e 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -63,7 +63,7 @@ dependencies { api 'com.sun.xml.bind:jaxb-impl:2.2.3-1' // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here, - // and whitelist this hack in JarHell + // and allowlist this hack in JarHell api 'javax.xml.bind:jaxb-api:2.3.1' } diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java index eac46356d9127..2686c376213f3 100644 --- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java +++ b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java @@ -33,42 +33,15 @@ package org.opensearch.discovery.ec2; import com.amazonaws.services.ec2.AmazonEC2; - -import org.opensearch.common.lease.Releasable; -import org.opensearch.common.util.concurrent.AbstractRefCounted; +import org.opensearch.common.concurrent.RefCountedReleasable; /** * Handles the shutdown of the wrapped 
{@link AmazonEC2} using reference * counting. */ -public class AmazonEc2Reference extends AbstractRefCounted implements Releasable { - - private final AmazonEC2 client; +public class AmazonEc2Reference extends RefCountedReleasable { AmazonEc2Reference(AmazonEC2 client) { - super("AWS_EC2_CLIENT"); - this.client = client; + super("AWS_EC2_CLIENT", client, client::shutdown); } - - /** - * Call when the client is not needed anymore. - */ - @Override - public void close() { - decRef(); - } - - /** - * Returns the underlying `AmazonEC2` client. All method calls are permitted BUT - * NOT shutdown. Shutdown is called when reference count reaches 0. - */ - public AmazonEC2 client() { - return client; - } - - @Override - protected void closeInternal() { - client.shutdown(); - } - } diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java index 4b36a60bb278f..f26ecfab501f8 100644 --- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java @@ -129,7 +129,7 @@ protected List fetchDynamicNodes() { // NOTE: we don't filter by security group during the describe instances request for two reasons: // 1. differences in VPCs require different parameters during query (ID vs Name) // 2. We want to use two different strategies: (all security groups vs. any security groups) - descInstances = SocketAccess.doPrivileged(() -> clientReference.client().describeInstances(buildDescribeInstancesRequest())); + descInstances = SocketAccess.doPrivileged(() -> clientReference.get().describeInstances(buildDescribeInstancesRequest())); } catch (final AmazonClientException e) { logger.info("Exception while retrieving instance list from AWS API: {}", e.getMessage()); logger.debug("Full exception:", e); diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java index be6261583bdd1..cb19c0d4255ac 100644 --- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java +++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java @@ -103,7 +103,7 @@ public void testNodeAttributesErrorLenient() throws Exception { public void testDefaultEndpoint() throws IOException { try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) { - final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint; + final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().get()).endpoint; assertThat(endpoint, is("")); } } @@ -111,7 +111,7 @@ public void testDefaultEndpoint() throws IOException { public void testSpecificEndpoint() throws IOException { final Settings settings = Settings.builder().put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2.endpoint").build(); try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(settings)) { - final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint; + final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().get()).endpoint; assertThat(endpoint, is("ec2.endpoint")); } } @@ -150,7 +150,7 @@ public void testClientSettingsReInit() throws IOException { try (Ec2DiscoveryPluginMock plugin = new 
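The AmazonEc2Reference rewrite above replaces a hand-rolled AbstractRefCounted/Releasable wrapper with the shared RefCountedReleasable base class, so call sites such as AwsEc2SeedHostsProvider and the plugin tests now reach the wrapped client through `get()` instead of `client()`. As a rough standalone sketch of the pattern (my own illustration, not the actual org.opensearch.common.concurrent.RefCountedReleasable source):

```
import java.util.concurrent.atomic.AtomicInteger;

// Minimal illustration of a reference-counted releasable wrapper; the real
// RefCountedReleasable base class is not reproduced here.
class SimpleRefCountedReleasable<T> implements AutoCloseable {
    private final String name;                    // label for diagnostics, e.g. "AWS_EC2_CLIENT"
    private final T ref;                          // the wrapped resource, e.g. an SDK client
    private final Runnable onFinalClose;          // e.g. client::shutdown
    private final AtomicInteger refCount = new AtomicInteger(1); // the creator holds one reference

    SimpleRefCountedReleasable(String name, T ref, Runnable onFinalClose) {
        this.name = name;
        this.ref = ref;
        this.onFinalClose = onFinalClose;
    }

    T get() {                                     // callers may use the resource, but not shut it down
        return ref;
    }

    void incRef() {
        refCount.incrementAndGet();
    }

    @Override
    public void close() {                         // releasing the last reference runs the shutdown hook
        if (refCount.decrementAndGet() == 0) {
            onFinalClose.run();
        }
    }

    @Override
    public String toString() {
        return name + "(refs=" + refCount.get() + ")";
    }
}
```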
Ec2DiscoveryPluginMock(settings1)) { try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { { - final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.get()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("ec2_access_1")); assertThat(credentials.getAWSSecretKey(), is("ec2_secret_1")); if (mockSecure1HasSessionToken) { @@ -159,32 +159,32 @@ public void testClientSettingsReInit() throws IOException { } else { assertThat(credentials, instanceOf(BasicAWSCredentials.class)); } - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); - assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyHost(), is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.get()).endpoint, is("ec2_endpoint_1")); } // reload secure settings2 plugin.reload(settings2); // client is not released, it is still using the old settings { - final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.get()).credentials.getCredentials(); if (mockSecure1HasSessionToken) { assertThat(credentials, instanceOf(BasicSessionCredentials.class)); assertThat(((BasicSessionCredentials) credentials).getSessionToken(), is("ec2_session_token_1")); } else { assertThat(credentials, instanceOf(BasicAWSCredentials.class)); } - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); - assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyHost(), is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.get()).endpoint, is("ec2_endpoint_1")); } } try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { - final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + final AWSCredentials credentials = 
((AmazonEC2Mock) clientReference.get()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("ec2_access_2")); assertThat(credentials.getAWSSecretKey(), is("ec2_secret_2")); if (mockSecure2HasSessionToken) { @@ -193,11 +193,11 @@ public void testClientSettingsReInit() throws IOException { } else { assertThat(credentials, instanceOf(BasicAWSCredentials.class)); } - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_2")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_2")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_2")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(882)); - assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyUsername(), is("proxy_username_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPassword(), is("proxy_password_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyHost(), is("proxy_host_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPort(), is(882)); + assertThat(((AmazonEC2Mock) clientReference.get()).endpoint, is("ec2_endpoint_2")); } } } diff --git a/plugins/examples/painless-whitelist/build.gradle b/plugins/examples/painless-whitelist/build.gradle index 61888efbcf1df..70052c209ab61 100644 --- a/plugins/examples/painless-whitelist/build.gradle +++ b/plugins/examples/painless-whitelist/build.gradle @@ -32,7 +32,7 @@ apply plugin: 'opensearch.yaml-rest-test' opensearchplugin { name 'painless-whitelist' - description 'An example whitelisting additional classes and methods in painless' + description 'An example allowlisting additional classes and methods in painless' classname 'org.opensearch.example.painlesswhitelist.MyWhitelistPlugin' extendedPlugins = ['lang-painless'] licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') diff --git a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistExtension.java b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistExtension.java index 471c28ea445d0..74adcf5e4f57a 100644 --- a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistExtension.java +++ b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistExtension.java @@ -46,19 +46,19 @@ import java.util.List; import java.util.Map; -/** An extension of painless which adds a whitelist. */ +/** An extension of painless which adds an allowlist. 
*/ public class ExampleWhitelistExtension implements PainlessExtension { @Override public Map, List> getContextWhitelists() { Map parsers = new HashMap<>(WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS); parsers.put(ExamplePainlessAnnotation.NAME, ExampleWhitelistAnnotationParser.INSTANCE); - Whitelist classWhitelist = WhitelistLoader.loadFromResourceFiles(ExampleWhitelistExtension.class, parsers, "example_whitelist.txt"); + Whitelist classAllowlist = WhitelistLoader.loadFromResourceFiles(ExampleWhitelistExtension.class, parsers, "example_whitelist.txt"); - ExampleWhitelistedInstance ewi = new ExampleWhitelistedInstance(1); + ExampleWhitelistedInstance eai = new ExampleWhitelistedInstance(1); WhitelistInstanceBinding addValue = new WhitelistInstanceBinding( "example addValue", - ewi, + eai, "addValue", "int", Collections.singletonList("int"), @@ -66,20 +66,20 @@ public Map, List> getContextWhitelists() { ); WhitelistInstanceBinding getValue = new WhitelistInstanceBinding( "example getValue", - ewi, + eai, "getValue", "int", Collections.emptyList(), Collections.emptyList() ); - Whitelist instanceWhitelist = new Whitelist( - ewi.getClass().getClassLoader(), + Whitelist instanceAllowlist = new Whitelist( + eai.getClass().getClassLoader(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Arrays.asList(addValue, getValue) ); - return Collections.singletonMap(FieldScript.CONTEXT, Arrays.asList(classWhitelist, instanceWhitelist)); + return Collections.singletonMap(FieldScript.CONTEXT, Arrays.asList(classAllowlist, instanceAllowlist)); } } diff --git a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistedClass.java b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistedClass.java index 1798375686d12..5832a2ee59a85 100644 --- a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistedClass.java +++ b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistedClass.java @@ -33,9 +33,9 @@ package org.opensearch.example.painlesswhitelist; /** - * An example of a class to be whitelisted for use by painless scripts + * An example of a class to be allowlisted for use by painless scripts * - * Each of the members and methods below are whitelisted for use in search scripts. + * Each of the members and methods below are allowlisted for use in search scripts. * See example_whitelist.txt. 
*/ public class ExampleWhitelistedClass { @@ -68,7 +68,7 @@ public static int toInt(String x) { return Integer.parseInt(x); } - // example method to attach annotations in whitelist + // example method to attach annotations in allowlist public void annotate() { // some logic here } diff --git a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/MyWhitelistPlugin.java b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/MyWhitelistPlugin.java index 38a95545c46cb..ab6ba53e4039f 100644 --- a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/MyWhitelistPlugin.java +++ b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/MyWhitelistPlugin.java @@ -35,5 +35,5 @@ import org.opensearch.plugins.Plugin; public class MyWhitelistPlugin extends Plugin { - // we don't actually need anything here, since whitelists are extended through SPI + // we don't actually need anything here, since allowlists are extended through SPI } diff --git a/plugins/examples/painless-whitelist/src/main/resources/org/opensearch/example/painlesswhitelist/example_whitelist.txt b/plugins/examples/painless-whitelist/src/main/resources/org/opensearch/example/painlesswhitelist/example_whitelist.txt index 5c6c605c7c28a..8f2ccaf05f2f3 100644 --- a/plugins/examples/painless-whitelist/src/main/resources/org/opensearch/example/painlesswhitelist/example_whitelist.txt +++ b/plugins/examples/painless-whitelist/src/main/resources/org/opensearch/example/painlesswhitelist/example_whitelist.txt @@ -17,7 +17,7 @@ # under the License. # -# This file contains a whitelist for an example class which may be access from painless +# This file contains an allowlist for an example class which may be access from painless class org.opensearch.example.painlesswhitelist.ExampleWhitelistedClass { # constructor diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/10_basic.yml b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/10_basic.yml index 1b8870582375d..cc3762eb42d68 100644 --- a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/10_basic.yml +++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/10_basic.yml @@ -1,4 +1,4 @@ -# Integration tests for the painless whitelist example plugin +# Integration tests for the painless allowlist example plugin # "Plugin loaded": - skip: diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml index 51a440142fd5e..92289af179278 100644 --- a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml +++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml @@ -1,4 +1,4 @@ -# Example test using whitelisted members and methods +# Example test using allowlisted members and methods "Whitelisted custom class": - do: diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml 
b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml index c6d8048b97961..447e1c2a8271f 100644 --- a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml +++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml @@ -1,4 +1,4 @@ -# Example test using whitelisted statically imported method +# Example test using allowlisted statically imported method "custom static imported method": - do: diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java index 10edd6d2586d9..2cf05da26c193 100644 --- a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java +++ b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java @@ -36,6 +36,7 @@ import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import org.opensearch.plugin.mapper.MapperSizePlugin; import org.opensearch.plugins.Plugin; import org.opensearch.test.OpenSearchIntegTestCase; @@ -62,13 +63,13 @@ protected Collection> nodePlugins() { // issue 5053 public void testThatUpdatingMappingShouldNotRemoveSizeMappingConfiguration() throws Exception { String index = "foo"; - String type = "mytype"; + String type = MapperService.SINGLE_MAPPING_NAME; XContentBuilder builder = jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); // check mapping again - assertSizeMappingEnabled(index, type, true); + assertSizeMappingEnabled(index, true); // update some field in the mapping XContentBuilder updateMappingBuilder = jsonBuilder().startObject() @@ -78,27 +79,22 @@ public void testThatUpdatingMappingShouldNotRemoveSizeMappingConfiguration() thr .endObject() .endObject() .endObject(); - AcknowledgedResponse putMappingResponse = client().admin() - .indices() - .preparePutMapping(index) - .setType(type) - .setSource(updateMappingBuilder) - .get(); + AcknowledgedResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setSource(updateMappingBuilder).get(); assertAcked(putMappingResponse); // make sure size field is still in mapping - assertSizeMappingEnabled(index, type, true); + assertSizeMappingEnabled(index, true); } public void testThatSizeCanBeSwitchedOnAndOff() throws Exception { String index = "foo"; - String type = "mytype"; + String type = MapperService.SINGLE_MAPPING_NAME; XContentBuilder builder = jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); // check mapping again - assertSizeMappingEnabled(index, type, true); + assertSizeMappingEnabled(index, true); // update some field in the mapping XContentBuilder updateMappingBuilder = jsonBuilder().startObject() @@ -106,27 +102,21 @@ public void testThatSizeCanBeSwitchedOnAndOff() throws Exception { .field("enabled", false) .endObject() .endObject(); - AcknowledgedResponse putMappingResponse = client().admin() - .indices() - 
.preparePutMapping(index) - .setType(type) - .setSource(updateMappingBuilder) - .get(); + AcknowledgedResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setSource(updateMappingBuilder).get(); assertAcked(putMappingResponse); // make sure size field is still in mapping - assertSizeMappingEnabled(index, type, false); + assertSizeMappingEnabled(index, false); } - private void assertSizeMappingEnabled(String index, String type, boolean enabled) throws IOException { + private void assertSizeMappingEnabled(String index, boolean enabled) throws IOException { String errMsg = String.format( Locale.ROOT, - "Expected size field mapping to be " + (enabled ? "enabled" : "disabled") + " for %s/%s", - index, - type + "Expected size field mapping to be " + (enabled ? "enabled" : "disabled") + " for %s", + index ); - GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes(type).get(); - Map mappingSource = getMappingsResponse.getMappings().get(index).get(type).getSourceAsMap(); + GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).get(); + Map mappingSource = getMappingsResponse.getMappings().get(index).getSourceAsMap(); assertThat(errMsg, mappingSource, hasKey("_size")); String sizeAsString = mappingSource.get("_size").toString(); assertThat(sizeAsString, is(notNullValue())); @@ -134,9 +124,9 @@ private void assertSizeMappingEnabled(String index, String type, boolean enabled } public void testBasic() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", "_size", "enabled=true")); + assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, "_size", "enabled=true")); final String source = "{\"f\":10}"; - indexRandom(true, client().prepareIndex("test", "type", "1").setSource(source, XContentType.JSON)); + indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON)); GetResponse getResponse = client().prepareGet("test", "1").setStoredFields("_size").get(); assertNotNull(getResponse.getField("_size")); assertEquals(source.length(), (int) getResponse.getField("_size").getValue()); diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 88ce2f667cee2..1f923b8f36bbd 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -45,7 +45,7 @@ opensearchplugin { dependencies { api 'com.azure:azure-core:1.22.0' - api 'com.azure:azure-storage-common:12.14.0' + api 'com.azure:azure-storage-common:12.14.3' api 'com.azure:azure-core-http-netty:1.11.7' api "io.netty:netty-codec-dns:${versions.netty}" api "io.netty:netty-codec-socks:${versions.netty}" @@ -68,7 +68,7 @@ dependencies { api "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}" api 'org.codehaus.woodstox:stax2-api:4.2.1' implementation 'com.fasterxml.woodstox:woodstox-core:6.1.1' - runtimeOnly 'com.google.guava:guava:30.1.1-jre' + runtimeOnly 'com.google.guava:guava:31.1-jre' api 'org.apache.commons:commons-lang3:3.4' testImplementation project(':test:fixtures:azure-fixture') } diff --git a/plugins/repository-azure/licenses/azure-storage-common-12.14.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-common-12.14.0.jar.sha1 deleted file mode 100644 index c2fbd451e785e..0000000000000 --- a/plugins/repository-azure/licenses/azure-storage-common-12.14.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ed58d3438a7fa3a2a5e9f60c0111795101dc8bf6 \ No newline at end 
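With mapping types removed, the `assertSizeMappingEnabled` helper in the SizeMappingIT changes above reads the `_size` entry straight from the index-level mapping source map. A small self-contained sketch of that lookup (the map contents are fabricated example data, not taken from the test):

```
import java.util.Map;

// Illustration of the typeless mapping-source check; the example map is invented.
public class SizeMappingCheck {
    static boolean isSizeEnabled(Map<String, Object> mappingSource) {
        Object size = mappingSource.get("_size");          // e.g. {enabled=true}
        return size != null && size.toString().contains("true");
    }

    public static void main(String[] args) {
        Map<String, Object> mappingSource = Map.of("_size", Map.of("enabled", true));
        System.out.println(isSizeEnabled(mappingSource));  // prints: true
    }
}
```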
of file diff --git a/plugins/repository-azure/licenses/azure-storage-common-12.14.3.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-common-12.14.3.jar.sha1 new file mode 100644 index 0000000000000..b7cb4342c014c --- /dev/null +++ b/plugins/repository-azure/licenses/azure-storage-common-12.14.3.jar.sha1 @@ -0,0 +1 @@ +e8d6258aa8bf1594980c01294e60de74d13a815f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/guava-30.1.1-jre.jar.sha1 b/plugins/repository-azure/licenses/guava-30.1.1-jre.jar.sha1 deleted file mode 100644 index 39e641fc7834f..0000000000000 --- a/plugins/repository-azure/licenses/guava-30.1.1-jre.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -87e0fd1df874ea3cbe577702fe6f17068b790fd8 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/guava-31.1-jre.jar.sha1 b/plugins/repository-azure/licenses/guava-31.1-jre.jar.sha1 new file mode 100644 index 0000000000000..e57390ebe1299 --- /dev/null +++ b/plugins/repository-azure/licenses/guava-31.1-jre.jar.sha1 @@ -0,0 +1 @@ +60458f877d055d0c9114d9e1a2efb737b4bc282c \ No newline at end of file diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 6d2966faa59cf..c9c7c8e6ffced 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -65,7 +65,7 @@ dependencies { api "org.apache.logging.log4j:log4j-core:${versions.log4j}" api 'org.apache.avro:avro:1.10.2' api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}" - api 'com.google.code.gson:gson:2.8.9' + api 'com.google.code.gson:gson:2.9.0' runtimeOnly 'com.google.guava:guava:30.1.1-jre' api 'com.google.protobuf:protobuf-java:3.19.3' api 'commons-logging:commons-logging:1.1.3' diff --git a/plugins/repository-hdfs/licenses/gson-2.8.9.jar.sha1 b/plugins/repository-hdfs/licenses/gson-2.8.9.jar.sha1 deleted file mode 100644 index f7a8108d8c8e6..0000000000000 --- a/plugins/repository-hdfs/licenses/gson-2.8.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8a432c1d6825781e21a02db2e2c33c5fde2833b9 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/gson-2.9.0.jar.sha1 b/plugins/repository-hdfs/licenses/gson-2.9.0.jar.sha1 new file mode 100644 index 0000000000000..8e9626b0c949b --- /dev/null +++ b/plugins/repository-hdfs/licenses/gson-2.9.0.jar.sha1 @@ -0,0 +1 @@ +8a1167e089096758b49f9b34066ef98b2f4b37aa \ No newline at end of file diff --git a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java index 02350499b1466..46d97f41b604f 100644 --- a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java @@ -88,9 +88,9 @@ public void testSimpleWorkflow() { logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test-idx-1", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - client().prepareIndex("test-idx-2", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - client().prepareIndex("test-idx-3", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-1").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-2").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-3").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); } 
client().admin().indices().prepareRefresh().get(); assertThat(count(client, "test-idx-1"), equalTo(100L)); @@ -117,13 +117,13 @@ public void testSimpleWorkflow() { logger.info("--> delete some data"); for (int i = 0; i < 50; i++) { - client.prepareDelete("test-idx-1", "doc", Integer.toString(i)).get(); + client.prepareDelete("test-idx-1", Integer.toString(i)).get(); } for (int i = 50; i < 100; i++) { - client.prepareDelete("test-idx-2", "doc", Integer.toString(i)).get(); + client.prepareDelete("test-idx-2", Integer.toString(i)).get(); } for (int i = 0; i < 100; i += 2) { - client.prepareDelete("test-idx-3", "doc", Integer.toString(i)).get(); + client.prepareDelete("test-idx-3", Integer.toString(i)).get(); } client().admin().indices().prepareRefresh().get(); assertThat(count(client, "test-idx-1"), equalTo(50L)); diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 21ad7b6dd54c1..c5939958c816a 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -64,7 +64,7 @@ dependencies { api "joda-time:joda-time:${versions.joda}" // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here, - // and whitelist this hack in JarHell + // and allowlist this hack in JarHell api 'javax.xml.bind:jaxb-api:2.3.1' testImplementation project(':test:fixtures:s3-fixture') diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java index 239918206f397..62e415705a011 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java @@ -32,45 +32,17 @@ package org.opensearch.repositories.s3; -import org.opensearch.common.util.concurrent.AbstractRefCounted; - import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; - -import org.opensearch.common.lease.Releasable; +import org.opensearch.common.concurrent.RefCountedReleasable; /** * Handles the shutdown of the wrapped {@link AmazonS3Client} using reference * counting. */ -public class AmazonS3Reference extends AbstractRefCounted implements Releasable { - - private final AmazonS3 client; +public class AmazonS3Reference extends RefCountedReleasable { AmazonS3Reference(AmazonS3 client) { - super("AWS_S3_CLIENT"); - this.client = client; - } - - /** - * Call when the client is not needed anymore. - */ - @Override - public void close() { - decRef(); + super("AWS_S3_CLIENT", client, client::shutdown); } - - /** - * Returns the underlying `AmazonS3` client. All method calls are permitted BUT - * NOT shutdown. Shutdown is called when reference count reaches 0. - */ - public AmazonS3 client() { - return client; - } - - @Override - protected void closeInternal() { - client.shutdown(); - } - } diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/ProxySettings.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/ProxySettings.java new file mode 100644 index 0000000000000..430af0096d8b5 --- /dev/null +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/ProxySettings.java @@ -0,0 +1,123 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.repositories.s3; + +import com.amazonaws.Protocol; +import org.opensearch.common.Strings; +import org.opensearch.common.settings.SettingsException; + +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; +import java.util.Objects; + +public class ProxySettings { + public static final ProxySettings NO_PROXY_SETTINGS = new ProxySettings(ProxyType.DIRECT, null, -1, null, null); + + public static enum ProxyType { + HTTP(Protocol.HTTP.name()), + HTTPS(Protocol.HTTPS.name()), + SOCKS("SOCKS"), + DIRECT("DIRECT"); + + private final String name; + + private ProxyType(String name) { + this.name = name; + } + + public Protocol toProtocol() { + if (this == DIRECT) { + // We check it in settings, + // the probability that it could be thrown is small, but how knows + throw new SettingsException("Couldn't convert to S3 protocol"); + } else if (this == SOCKS) { + throw new SettingsException("Couldn't convert to S3 protocol. SOCKS is not supported"); + } + return Protocol.valueOf(name()); + } + + } + + private final ProxyType type; + + private final String host; + + private final String username; + + private final String password; + + private final int port; + + public String getHost() { + return host; + } + + public ProxySettings(final ProxyType type, final String host, final int port, final String username, final String password) { + this.type = type; + this.host = host; + this.port = port; + this.username = username; + this.password = password; + } + + public ProxyType getType() { + return this.type; + } + + public String getHostName() { + return host; + } + + public int getPort() { + return port; + } + + public InetSocketAddress getAddress() { + try { + return new InetSocketAddress(InetAddress.getByName(host), port); + } catch (UnknownHostException e) { + // this error won't be thrown since validation of the host name is in the S3ClientSettings + throw new RuntimeException(e); + } + } + + public String getUsername() { + return this.username; + } + + public String getPassword() { + return this.password; + } + + public boolean isAuthenticated() { + return Strings.isNullOrEmpty(username) == false && Strings.isNullOrEmpty(password) == false; + } + + public ProxySettings recreateWithNewHostAndPort(final String host, final int port) { + return new ProxySettings(type, host, port, username, password); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + final ProxySettings that = (ProxySettings) o; + return port == that.port + && type == that.type + && Objects.equals(host, that.host) + && Objects.equals(username, that.username) + && Objects.equals(password, that.password); + } + + @Override + public int hashCode() { + return Objects.hash(type, host, username, password, port); + } +} diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java index 5a9c03c0b2a37..678be7c6f13f2 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java @@ -101,7 +101,7 @@ class S3BlobContainer extends AbstractBlobContainer { @Override public boolean blobExists(String blobName) { try (AmazonS3Reference clientReference = blobStore.clientReference()) { - return SocketAccess.doPrivileged(() -> 
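The new ProxySettings value class above bundles proxy type, endpoint and credentials behind one immutable object, with `NO_PROXY_SETTINGS` standing in for the no-proxy case. A hedged usage sketch, which would have to live in the same org.opensearch.repositories.s3 package; the host, port and credentials are invented example values:

```
// Usage sketch for the ProxySettings class introduced above; values are invented examples.
static void proxySettingsExample() {
    ProxySettings socks = new ProxySettings(
        ProxySettings.ProxyType.SOCKS, "127.0.0.10", 1080, "proxy_user", "proxy_pass"
    );
    System.out.println(socks.isAuthenticated());  // true: both username and password are set
    System.out.println(socks.getAddress());       // the resolved socket address for 127.0.0.10:1080

    // DIRECT stands for "no proxy"; the shared constant avoids null checks at call sites.
    ProxySettings none = ProxySettings.NO_PROXY_SETTINGS;
    System.out.println(none.getType());           // DIRECT
}
```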
clientReference.client().doesObjectExist(blobStore.bucket(), buildKey(blobName))); + return SocketAccess.doPrivileged(() -> clientReference.get().doesObjectExist(blobStore.bucket(), buildKey(blobName))); } catch (final Exception e) { throw new BlobStoreException("Failed to check if blob [" + blobName + "] exists", e); } @@ -169,13 +169,13 @@ public DeleteResult delete() throws IOException { ObjectListing list; if (prevListing != null) { final ObjectListing finalPrevListing = prevListing; - list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(finalPrevListing)); + list = SocketAccess.doPrivileged(() -> clientReference.get().listNextBatchOfObjects(finalPrevListing)); } else { final ListObjectsRequest listObjectsRequest = new ListObjectsRequest(); listObjectsRequest.setBucketName(blobStore.bucket()); listObjectsRequest.setPrefix(keyPath); listObjectsRequest.setRequestMetricCollector(blobStore.listMetricCollector); - list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(listObjectsRequest)); + list = SocketAccess.doPrivileged(() -> clientReference.get().listObjects(listObjectsRequest)); } final List blobsToDelete = new ArrayList<>(); list.getObjectSummaries().forEach(s3ObjectSummary -> { @@ -236,7 +236,7 @@ private void doDeleteBlobs(List blobNames, boolean relative) throws IOEx .map(DeleteObjectsRequest.KeyVersion::getKey) .collect(Collectors.toList()); try { - clientReference.client().deleteObjects(deleteRequest); + clientReference.get().deleteObjects(deleteRequest); outstanding.removeAll(keysInRequest); } catch (MultiObjectDeleteException e) { // We are sending quiet mode requests so we can't use the deleted keys entry on the exception and instead @@ -324,9 +324,9 @@ private static List executeListing(AmazonS3Reference clientRefere ObjectListing list; if (prevListing != null) { final ObjectListing finalPrevListing = prevListing; - list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(finalPrevListing)); + list = SocketAccess.doPrivileged(() -> clientReference.get().listNextBatchOfObjects(finalPrevListing)); } else { - list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(listObjectsRequest)); + list = SocketAccess.doPrivileged(() -> clientReference.get().listObjects(listObjectsRequest)); } results.add(list); if (list.isTruncated()) { @@ -374,7 +374,7 @@ void executeSingleUpload(final S3BlobStore blobStore, final String blobName, fin putRequest.setRequestMetricCollector(blobStore.putMetricCollector); try (AmazonS3Reference clientReference = blobStore.clientReference()) { - SocketAccess.doPrivilegedVoid(() -> { clientReference.client().putObject(putRequest); }); + SocketAccess.doPrivilegedVoid(() -> { clientReference.get().putObject(putRequest); }); } catch (final AmazonClientException e) { throw new IOException("Unable to upload object [" + blobName + "] using a single upload", e); } @@ -413,7 +413,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, final String blobName, } try (AmazonS3Reference clientReference = blobStore.clientReference()) { - uploadId.set(SocketAccess.doPrivileged(() -> clientReference.client().initiateMultipartUpload(initRequest).getUploadId())); + uploadId.set(SocketAccess.doPrivileged(() -> clientReference.get().initiateMultipartUpload(initRequest).getUploadId())); if (Strings.isEmpty(uploadId.get())) { throw new IOException("Failed to initialize multipart upload " + blobName); } @@ -439,7 +439,7 @@ void executeMultipartUpload(final S3BlobStore 
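The S3BlobContainer changes above are mechanical, but the surrounding pattern is worth spelling out: the client reference is acquired in try-with-resources, the raw AmazonS3 client is reached through `get()`, and closing the reference releases it even when the privileged call throws. A sketch of that call-site shape, assuming the org.opensearch.repositories.s3 package (the method itself is an illustration, not part of the patch):

```
// Illustrative call-site shape only; clientReference(), get(), bucket() and
// doesObjectExist(...) are the calls visible in the hunks above.
static boolean blobExistsSketch(S3BlobStore blobStore, String blobKey) {
    try (AmazonS3Reference clientReference = blobStore.clientReference()) {
        return SocketAccess.doPrivileged(
            () -> clientReference.get().doesObjectExist(blobStore.bucket(), blobKey)
        );
    }
}
```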
blobStore, final String blobName, } bytesCount += uploadRequest.getPartSize(); - final UploadPartResult uploadResponse = SocketAccess.doPrivileged(() -> clientReference.client().uploadPart(uploadRequest)); + final UploadPartResult uploadResponse = SocketAccess.doPrivileged(() -> clientReference.get().uploadPart(uploadRequest)); parts.add(uploadResponse.getPartETag()); } @@ -456,7 +456,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, final String blobName, parts ); complRequest.setRequestMetricCollector(blobStore.multiPartUploadMetricCollector); - SocketAccess.doPrivilegedVoid(() -> clientReference.client().completeMultipartUpload(complRequest)); + SocketAccess.doPrivilegedVoid(() -> clientReference.get().completeMultipartUpload(complRequest)); success = true; } catch (final AmazonClientException e) { @@ -465,7 +465,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, final String blobName, if ((success == false) && Strings.hasLength(uploadId.get())) { final AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest(bucketName, blobName, uploadId.get()); try (AmazonS3Reference clientReference = blobStore.clientReference()) { - SocketAccess.doPrivilegedVoid(() -> clientReference.client().abortMultipartUpload(abortRequest)); + SocketAccess.doPrivilegedVoid(() -> clientReference.get().abortMultipartUpload(abortRequest)); } } } diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java index 805f48aae9b2d..e02c7cae89378 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java @@ -34,13 +34,18 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; +import org.opensearch.common.Strings; +import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.settings.SecureSetting; import org.opensearch.common.settings.SecureString; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; +import org.opensearch.common.settings.SettingsException; import org.opensearch.common.unit.TimeValue; +import java.net.InetAddress; +import java.net.UnknownHostException; import java.util.Collections; import java.util.HashMap; import java.util.Locale; @@ -54,6 +59,8 @@ */ final class S3ClientSettings { + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(S3ClientSettings.class); + // prefix for s3 client settings private static final String PREFIX = "s3.client."; @@ -95,6 +102,13 @@ final class S3ClientSettings { key -> new Setting<>(key, "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope) ); + /** The protocol to use to connect to s3. */ + static final Setting.AffixSetting PROXY_TYPE_SETTING = Setting.affixKeySetting( + PREFIX, + "proxy.type", + key -> new Setting<>(key, "direct", s -> ProxySettings.ProxyType.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope) + ); + /** The host name of a proxy to connect to s3 through. 
*/ static final Setting.AffixSetting PROXY_HOST_SETTING = Setting.affixKeySetting( PREFIX, @@ -106,7 +120,7 @@ final class S3ClientSettings { static final Setting.AffixSetting PROXY_PORT_SETTING = Setting.affixKeySetting( PREFIX, "proxy.port", - key -> Setting.intSetting(key, 80, 0, 1 << 16, Property.NodeScope) + key -> Setting.intSetting(key, 80, 0, (1 << 16) - 1, Property.NodeScope) ); /** The username of a proxy to connect to s3 through. */ @@ -181,19 +195,8 @@ final class S3ClientSettings { /** The protocol to use to talk to s3. Defaults to https. */ final Protocol protocol; - /** An optional proxy host that requests to s3 should be made through. */ - final String proxyHost; - - /** The port number the proxy host should be connected on. */ - final int proxyPort; - - // these should be "secure" yet the api for the s3 client only takes String, so storing them - // as SecureString here won't really help with anything - /** An optional username for the proxy host, for basic authentication. */ - final String proxyUsername; - - /** An optional password for the proxy host, for basic authentication. */ - final String proxyPassword; + /** An optional proxy settings that requests to s3 should be made through. */ + final ProxySettings proxySettings; /** The read timeout for the s3 client. */ final int readTimeoutMillis; @@ -220,25 +223,18 @@ private S3ClientSettings( S3BasicCredentials credentials, String endpoint, Protocol protocol, - String proxyHost, - int proxyPort, - String proxyUsername, - String proxyPassword, int readTimeoutMillis, int maxRetries, boolean throttleRetries, boolean pathStyleAccess, boolean disableChunkedEncoding, String region, - String signerOverride + String signerOverride, + ProxySettings proxySettings ) { this.credentials = credentials; this.endpoint = endpoint; this.protocol = protocol; - this.proxyHost = proxyHost; - this.proxyPort = proxyPort; - this.proxyUsername = proxyUsername; - this.proxyPassword = proxyPassword; this.readTimeoutMillis = readTimeoutMillis; this.maxRetries = maxRetries; this.throttleRetries = throttleRetries; @@ -246,6 +242,7 @@ private S3ClientSettings( this.disableChunkedEncoding = disableChunkedEncoding; this.region = region; this.signerOverride = signerOverride; + this.proxySettings = proxySettings; } /** @@ -263,8 +260,10 @@ S3ClientSettings refine(Settings repositorySettings) { final String newEndpoint = getRepoSettingOrDefault(ENDPOINT_SETTING, normalizedSettings, endpoint); final Protocol newProtocol = getRepoSettingOrDefault(PROTOCOL_SETTING, normalizedSettings, protocol); - final String newProxyHost = getRepoSettingOrDefault(PROXY_HOST_SETTING, normalizedSettings, proxyHost); - final int newProxyPort = getRepoSettingOrDefault(PROXY_PORT_SETTING, normalizedSettings, proxyPort); + + final String newProxyHost = getRepoSettingOrDefault(PROXY_HOST_SETTING, normalizedSettings, proxySettings.getHostName()); + final int newProxyPort = getRepoSettingOrDefault(PROXY_PORT_SETTING, normalizedSettings, proxySettings.getPort()); + final int newReadTimeoutMillis = Math.toIntExact( getRepoSettingOrDefault(READ_TIMEOUT_SETTING, normalizedSettings, TimeValue.timeValueMillis(readTimeoutMillis)).millis() ); @@ -286,8 +285,8 @@ S3ClientSettings refine(Settings repositorySettings) { final String newSignerOverride = getRepoSettingOrDefault(SIGNER_OVERRIDE, normalizedSettings, signerOverride); if (Objects.equals(endpoint, newEndpoint) && protocol == newProtocol - && Objects.equals(proxyHost, newProxyHost) - && proxyPort == newProxyPort + && 
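One small but real fix above is the upper bound of the `proxy.port` setting: `1 << 16` is 65536, one past the largest legal TCP port, so the bound is tightened to `(1 << 16) - 1`. A trivial check of the arithmetic:

```
// Quick check of the bound fixed above: ports are 16-bit, so the maximum is 2^16 - 1.
public class PortBoundCheck {
    public static void main(String[] args) {
        System.out.println(1 << 16);        // 65536 (old upper bound, not a valid port)
        System.out.println((1 << 16) - 1);  // 65535 (new upper bound, largest valid port)
    }
}
```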
Objects.equals(proxySettings.getHostName(), newProxyHost) + && proxySettings.getPort() == newProxyPort && newReadTimeoutMillis == readTimeoutMillis && maxRetries == newMaxRetries && newThrottleRetries == throttleRetries @@ -298,21 +297,20 @@ S3ClientSettings refine(Settings repositorySettings) { && Objects.equals(signerOverride, newSignerOverride)) { return this; } + + validateInetAddressFor(newProxyHost); return new S3ClientSettings( newCredentials, newEndpoint, newProtocol, - newProxyHost, - newProxyPort, - proxyUsername, - proxyPassword, newReadTimeoutMillis, newMaxRetries, newThrottleRetries, newPathStyleAccess, newDisableChunkedEncoding, newRegion, - newSignerOverride + newSignerOverride, + proxySettings.recreateWithNewHostAndPort(newProxyHost, newProxyPort) ); } @@ -401,27 +399,69 @@ private static S3BasicCredentials loadCredentials(Settings settings, String clie // pkg private for tests /** Parse settings for a single client. */ static S3ClientSettings getClientSettings(final Settings settings, final String clientName) { - try ( - SecureString proxyUsername = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING); - SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING) - ) { - return new S3ClientSettings( - S3ClientSettings.loadCredentials(settings, clientName), - getConfigValue(settings, clientName, ENDPOINT_SETTING), - getConfigValue(settings, clientName, PROTOCOL_SETTING), - getConfigValue(settings, clientName, PROXY_HOST_SETTING), - getConfigValue(settings, clientName, PROXY_PORT_SETTING), - proxyUsername.toString(), - proxyPassword.toString(), - Math.toIntExact(getConfigValue(settings, clientName, READ_TIMEOUT_SETTING).millis()), - getConfigValue(settings, clientName, MAX_RETRIES_SETTING), - getConfigValue(settings, clientName, USE_THROTTLE_RETRIES_SETTING), - getConfigValue(settings, clientName, USE_PATH_STYLE_ACCESS), - getConfigValue(settings, clientName, DISABLE_CHUNKED_ENCODING), - getConfigValue(settings, clientName, REGION), - getConfigValue(settings, clientName, SIGNER_OVERRIDE) + final Protocol awsProtocol = getConfigValue(settings, clientName, PROTOCOL_SETTING); + return new S3ClientSettings( + S3ClientSettings.loadCredentials(settings, clientName), + getConfigValue(settings, clientName, ENDPOINT_SETTING), + awsProtocol, + Math.toIntExact(getConfigValue(settings, clientName, READ_TIMEOUT_SETTING).millis()), + getConfigValue(settings, clientName, MAX_RETRIES_SETTING), + getConfigValue(settings, clientName, USE_THROTTLE_RETRIES_SETTING), + getConfigValue(settings, clientName, USE_PATH_STYLE_ACCESS), + getConfigValue(settings, clientName, DISABLE_CHUNKED_ENCODING), + getConfigValue(settings, clientName, REGION), + getConfigValue(settings, clientName, SIGNER_OVERRIDE), + validateAndCreateProxySettings(settings, clientName, awsProtocol) + ); + } + + static ProxySettings validateAndCreateProxySettings(final Settings settings, final String clientName, final Protocol awsProtocol) { + ProxySettings.ProxyType proxyType = getConfigValue(settings, clientName, PROXY_TYPE_SETTING); + final String proxyHost = getConfigValue(settings, clientName, PROXY_HOST_SETTING); + final int proxyPort = getConfigValue(settings, clientName, PROXY_PORT_SETTING); + final SecureString proxyUserName = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING); + final SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING); + if (awsProtocol != Protocol.HTTPS && proxyType == ProxySettings.ProxyType.DIRECT && 
Strings.hasText(proxyHost)) { + // This is backward compatibility for the current behaviour. + // The default value for Protocol settings is HTTPS, + // The expectation of ex-developers that protocol is the same as the proxy protocol + // which is a separate setting for AWS SDK. + // In this case, proxy type should be the same as a protocol, + // when proxy host and port have been set + proxyType = ProxySettings.ProxyType.valueOf(awsProtocol.name()); + deprecationLogger.deprecate( + PROTOCOL_SETTING.getConcreteSettingForNamespace(clientName).getKey(), + "Using of " + + PROTOCOL_SETTING.getConcreteSettingForNamespace(clientName).getKey() + + " as proxy type is deprecated and will be removed in future releases. Please use " + + PROXY_TYPE_SETTING.getConcreteSettingForNamespace(clientName).getKey() + + " instead to specify proxy type." ); } + // Validate proxy settings + if (proxyType == ProxySettings.ProxyType.DIRECT + && (proxyPort != 80 || Strings.hasText(proxyHost) || Strings.hasText(proxyUserName) || Strings.hasText(proxyPassword))) { + throw new SettingsException("S3 proxy port or host or username or password have been set but proxy type is not defined."); + } + if (proxyType != ProxySettings.ProxyType.DIRECT && Strings.isEmpty(proxyHost)) { + throw new SettingsException("S3 proxy type has been set but proxy host or port is not defined."); + } + if (proxyType == ProxySettings.ProxyType.DIRECT) { + return ProxySettings.NO_PROXY_SETTINGS; + } + if (awsProtocol == Protocol.HTTP && proxyType == ProxySettings.ProxyType.SOCKS) { + throw new SettingsException("SOCKS proxy is not supported for HTTP protocol"); + } + validateInetAddressFor(proxyHost); + return new ProxySettings(proxyType, proxyHost, proxyPort, proxyUserName.toString(), proxyPassword.toString()); + } + + static void validateInetAddressFor(final String proxyHost) { + try { + InetAddress.getByName(proxyHost); + } catch (final UnknownHostException e) { + throw new SettingsException("S3 proxy host is unknown.", e); + } } @Override @@ -433,16 +473,13 @@ public boolean equals(final Object o) { return false; } final S3ClientSettings that = (S3ClientSettings) o; - return proxyPort == that.proxyPort - && readTimeoutMillis == that.readTimeoutMillis + return readTimeoutMillis == that.readTimeoutMillis && maxRetries == that.maxRetries && throttleRetries == that.throttleRetries && Objects.equals(credentials, that.credentials) && Objects.equals(endpoint, that.endpoint) && protocol == that.protocol - && Objects.equals(proxyHost, that.proxyHost) - && Objects.equals(proxyUsername, that.proxyUsername) - && Objects.equals(proxyPassword, that.proxyPassword) + && proxySettings.equals(that.proxySettings) && Objects.equals(disableChunkedEncoding, that.disableChunkedEncoding) && Objects.equals(region, that.region) && Objects.equals(signerOverride, that.signerOverride); @@ -454,10 +491,7 @@ public int hashCode() { credentials, endpoint, protocol, - proxyHost, - proxyPort, - proxyUsername, - proxyPassword, + proxySettings, readTimeoutMillis, maxRetries, throttleRetries, diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java index 82c3367679c53..388f5b8d74a2b 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java @@ -110,7 +110,7 @@ private void openStream() 
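The validation introduced above amounts to a small decision table: a legacy configuration (proxy host set, `proxy.type` left at its default) falls back to the `protocol` value and logs a deprecation warning; DIRECT combined with any explicit proxy field is rejected; a non-DIRECT type without a host is rejected; and SOCKS cannot be paired with the HTTP protocol setting. A simplified restatement of those rules, detached from the Settings machinery (enum values and messages are paraphrased, and the username/password checks are omitted):

```
import java.util.Locale;

// Simplified restatement of the proxy validation rules above; not the actual method.
public class ProxyValidationSketch {
    enum ProxyType { HTTP, HTTPS, SOCKS, DIRECT }

    static ProxyType validate(ProxyType type, String host, int port, String protocol) {
        boolean hostSet = host != null && !host.isEmpty();
        if (!"https".equals(protocol) && type == ProxyType.DIRECT && hostSet) {
            // legacy behaviour: inherit the proxy type from the protocol setting (deprecated)
            type = ProxyType.valueOf(protocol.toUpperCase(Locale.ROOT));
        }
        if (type == ProxyType.DIRECT && (port != 80 || hostSet)) {
            throw new IllegalArgumentException("proxy fields set but proxy type is not defined");
        }
        if (type != ProxyType.DIRECT && !hostSet) {
            throw new IllegalArgumentException("proxy type set but proxy host is not defined");
        }
        if ("http".equals(protocol) && type == ProxyType.SOCKS) {
            throw new IllegalArgumentException("SOCKS proxy is not supported for HTTP protocol");
        }
        return type;
    }

    public static void main(String[] args) {
        // host set, type left at DIRECT, protocol http -> falls back to an HTTP proxy
        System.out.println(validate(ProxyType.DIRECT, "127.0.0.10", 8080, "http")); // HTTP
    }
}
```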
throws IOException { + end; getObjectRequest.setRange(Math.addExact(start, currentOffset), end); } - final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); + final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.get().getObject(getObjectRequest)); this.currentStreamLastOffset = Math.addExact(Math.addExact(start, currentOffset), getStreamLength(s3Object)); this.currentStream = s3Object.getObjectContent(); } catch (final AmazonClientException e) { diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java index 1f5cb2a752eef..3ce19378ac05c 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java @@ -39,10 +39,16 @@ import com.amazonaws.auth.EC2ContainerCredentialsProviderWrapper; import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.http.IdleConnectionReaper; +import com.amazonaws.http.SystemPropertyTlsKeyManagersProvider; +import com.amazonaws.http.conn.ssl.SdkTLSSocketFactory; +import com.amazonaws.internal.SdkSSLContext; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.internal.Constants; +import org.apache.http.conn.ssl.DefaultHostnameVerifier; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.protocol.HttpContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.cluster.metadata.RepositoryMetadata; @@ -50,7 +56,15 @@ import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.settings.Settings; +import javax.net.ssl.SSLContext; import java.io.Closeable; +import java.io.IOException; +import java.net.Authenticator; +import java.net.InetSocketAddress; +import java.net.PasswordAuthentication; +import java.net.Proxy; +import java.net.Socket; +import java.security.SecureRandom; import java.util.Map; import static java.util.Collections.emptyMap; @@ -189,12 +203,32 @@ static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { clientConfiguration.setResponseMetadataCacheSize(0); clientConfiguration.setProtocol(clientSettings.protocol); - if (Strings.hasText(clientSettings.proxyHost)) { - // TODO: remove this leniency, these settings should exist together and be validated - clientConfiguration.setProxyHost(clientSettings.proxyHost); - clientConfiguration.setProxyPort(clientSettings.proxyPort); - clientConfiguration.setProxyUsername(clientSettings.proxyUsername); - clientConfiguration.setProxyPassword(clientSettings.proxyPassword); + if (clientSettings.proxySettings != ProxySettings.NO_PROXY_SETTINGS) { + if (clientSettings.proxySettings.getType() == ProxySettings.ProxyType.SOCKS) { + SocketAccess.doPrivilegedVoid(() -> { + if (clientSettings.proxySettings.isAuthenticated()) { + Authenticator.setDefault(new Authenticator() { + @Override + protected PasswordAuthentication getPasswordAuthentication() { + return new PasswordAuthentication( + clientSettings.proxySettings.getUsername(), + clientSettings.proxySettings.getPassword().toCharArray() + ); + } + }); + } + clientConfiguration.getApacheHttpClientConfig() + .setSslSocketFactory(createSocksSslConnectionSocketFactory(clientSettings.proxySettings.getAddress())); + }); + } else { + if 
(clientSettings.proxySettings.getType() != ProxySettings.ProxyType.DIRECT) { + clientConfiguration.setProxyProtocol(clientSettings.proxySettings.getType().toProtocol()); + } + clientConfiguration.setProxyHost(clientSettings.proxySettings.getHostName()); + clientConfiguration.setProxyPort(clientSettings.proxySettings.getPort()); + clientConfiguration.setProxyUsername(clientSettings.proxySettings.getUsername()); + clientConfiguration.setProxyPassword(clientSettings.proxySettings.getPassword()); + } } if (Strings.hasLength(clientSettings.signerOverride)) { @@ -208,6 +242,20 @@ static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { return clientConfiguration; } + private static SSLConnectionSocketFactory createSocksSslConnectionSocketFactory(final InetSocketAddress address) { + // This part was taken from AWS settings + final SSLContext sslCtx = SdkSSLContext.getPreferredSSLContext( + new SystemPropertyTlsKeyManagersProvider().getKeyManagers(), + new SecureRandom() + ); + return new SdkTLSSocketFactory(sslCtx, new DefaultHostnameVerifier()) { + @Override + public Socket createSocket(final HttpContext ctx) throws IOException { + return new Socket(new Proxy(Proxy.Type.SOCKS, address)); + } + }; + } + // pkg private for tests static AWSCredentialsProvider buildCredentials(Logger logger, S3ClientSettings clientSettings) { final S3BasicCredentials credentials = clientSettings.credentials; diff --git a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy index 8c9b91418ed53..f6c154bb3b14d 100644 --- a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy @@ -51,6 +51,9 @@ grant { // s3 client opens socket connections for to access repository permission java.net.SocketPermission "*", "connect"; + // s3 client set Authenticator for proxy username/password + permission java.net.NetPermission "setDefaultAuthenticator"; + // only for tests : org.opensearch.repositories.s3.S3RepositoryPlugin permission java.util.PropertyPermission "opensearch.allow_insecure_settings", "read,write"; }; diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/AwsS3ServiceImplTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/AwsS3ServiceImplTests.java index 0f1bfdf7b7d6b..38d9ebf337731 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/AwsS3ServiceImplTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/AwsS3ServiceImplTests.java @@ -36,17 +36,19 @@ import com.amazonaws.Protocol; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.AWSStaticCredentialsProvider; - import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Settings; import org.opensearch.test.OpenSearchTestCase; +import java.io.IOException; import java.util.Locale; import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.opensearch.repositories.s3.S3ClientSettings.PROTOCOL_SETTING; +import static org.opensearch.repositories.s3.S3ClientSettings.PROXY_TYPE_SETTING; public class AwsS3ServiceImplTests extends OpenSearchTestCase { @@ -140,14 +142,14 @@ public void testAWSConfigurationWithAwsSettings() { final Settings settings = Settings.builder() 
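The S3Service changes above wire SOCKS support through two JDK facilities: a process-wide java.net.Authenticator for the proxy credentials, and an SSL socket factory whose sockets are created through a java.net.Proxy of type SOCKS. A reduced, standalone sketch of those two pieces (the address and credentials are invented values, and this is not the SdkTLSSocketFactory subclass from the patch):

```
import java.net.Authenticator;
import java.net.InetSocketAddress;
import java.net.PasswordAuthentication;
import java.net.Proxy;
import java.net.Socket;

// Reduced sketch of the two JDK mechanisms used above for SOCKS proxying.
public class SocksProxySketch {
    public static void main(String[] args) {
        // 1. Credentials for the SOCKS proxy go through the process-wide Authenticator.
        Authenticator.setDefault(new Authenticator() {
            @Override
            protected PasswordAuthentication getPasswordAuthentication() {
                return new PasswordAuthentication("proxy_user", "proxy_pass".toCharArray());
            }
        });

        // 2. Sockets are created through a java.net.Proxy of type SOCKS, which is what the
        //    patched socket factory does in createSocket(HttpContext).
        InetSocketAddress proxyAddress = new InetSocketAddress("127.0.0.10", 1080);
        Socket socket = new Socket(new Proxy(Proxy.Type.SOCKS, proxyAddress));
        System.out.println("unconnected SOCKS-backed socket created: " + socket);
    }
}
```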
.setSecureSettings(secureSettings) .put("s3.client.default.protocol", "http") - .put("s3.client.default.proxy.host", "aws_proxy_host") + .put("s3.client.default.proxy.host", "127.0.0.10") .put("s3.client.default.proxy.port", 8080) .put("s3.client.default.read_timeout", "10s") .build(); launchAWSConfigurationTest( settings, Protocol.HTTP, - "aws_proxy_host", + "127.0.0.10", 8080, "aws_proxy_username", "aws_proxy_password", @@ -155,6 +157,60 @@ public void testAWSConfigurationWithAwsSettings() { ClientConfiguration.DEFAULT_THROTTLE_RETRIES, 10000 ); + assertWarnings( + "Using of " + + PROTOCOL_SETTING.getConcreteSettingForNamespace("default").getKey() + + " as proxy type is deprecated and will be removed in future releases. Please use " + + PROXY_TYPE_SETTING.getConcreteSettingForNamespace("default").getKey() + + " instead to specify proxy type." + ); + } + + public void testProxyTypeOverrideProtocolSettings() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.proxy.username", "aws_proxy_username"); + secureSettings.setString("s3.client.default.proxy.password", "aws_proxy_password"); + final Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .put("s3.client.default.protocol", "http") + .put("s3.client.default.proxy.type", "https") + .put("s3.client.default.proxy.host", "127.0.0.10") + .put("s3.client.default.proxy.port", 8080) + .put("s3.client.default.read_timeout", "10s") + .build(); + launchAWSConfigurationTest( + settings, + Protocol.HTTP, + "127.0.0.10", + 8080, + "aws_proxy_username", + "aws_proxy_password", + 3, + ClientConfiguration.DEFAULT_THROTTLE_RETRIES, + 10000 + ); + } + + public void testSocksProxyConfiguration() throws IOException { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.proxy.username", "aws_proxy_username"); + secureSettings.setString("s3.client.default.proxy.password", "aws_proxy_password"); + final Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .put("s3.client.default.proxy.type", "socks") + .put("s3.client.default.proxy.host", "127.0.0.10") + .put("s3.client.default.proxy.port", 8080) + .put("s3.client.default.read_timeout", "10s") + .build(); + + final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default"); + final ClientConfiguration configuration = S3Service.buildConfiguration(clientSettings); + + assertEquals(Protocol.HTTPS, configuration.getProtocol()); + assertEquals(Protocol.HTTP, configuration.getProxyProtocol()); // default value in SDK + assertEquals(-1, configuration.getProxyPort()); + assertNull(configuration.getProxyUsername()); + assertNull(configuration.getProxyPassword()); } public void testRepositoryMaxRetries() { diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java index 645fe5cf1d134..9c359d67db88b 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java @@ -123,7 +123,7 @@ public void testRepositoryCredentialsOverrideSecureCredentials() { assertThat(repositories.repository(repositoryName), instanceOf(S3Repository.class)); final S3Repository repository = (S3Repository) 
repositories.repository(repositoryName); - final AmazonS3 client = repository.createBlobStore().clientReference().client(); + final AmazonS3 client = repository.createBlobStore().clientReference().get(); assertThat(client, instanceOf(ProxyS3RepositoryPlugin.ClientAndCredentials.class)); final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) client).credentials.getCredentials(); @@ -162,7 +162,7 @@ public void testReinitSecureCredentials() { final S3Repository repository = (S3Repository) repositories.repository(repositoryName); try (AmazonS3Reference clientReference = ((S3BlobStore) repository.blobStore()).clientReference()) { - final AmazonS3 client = clientReference.client(); + final AmazonS3 client = clientReference.get(); assertThat(client, instanceOf(ProxyS3RepositoryPlugin.ClientAndCredentials.class)); final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) client).credentials.getCredentials(); @@ -202,7 +202,7 @@ public void testReinitSecureCredentials() { // check credentials have been updated try (AmazonS3Reference clientReference = ((S3BlobStore) repository.blobStore()).clientReference()) { - final AmazonS3 client = clientReference.client(); + final AmazonS3 client = clientReference.get(); assertThat(client, instanceOf(ProxyS3RepositoryPlugin.ClientAndCredentials.class)); final AWSCredentials newCredentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) client).credentials.getCredentials(); diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java index ea0b554df880e..462ed5377ff9a 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java @@ -37,8 +37,12 @@ import com.amazonaws.services.s3.AmazonS3Client; import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Settings; +import org.opensearch.common.settings.SettingsException; import org.opensearch.test.OpenSearchTestCase; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.util.Locale; import java.util.Map; import static org.hamcrest.Matchers.contains; @@ -55,10 +59,7 @@ public void testThereIsADefaultClientByDefault() { assertThat(defaultSettings.credentials, nullValue()); assertThat(defaultSettings.endpoint, is(emptyString())); assertThat(defaultSettings.protocol, is(Protocol.HTTPS)); - assertThat(defaultSettings.proxyHost, is(emptyString())); - assertThat(defaultSettings.proxyPort, is(80)); - assertThat(defaultSettings.proxyUsername, is(emptyString())); - assertThat(defaultSettings.proxyPassword, is(emptyString())); + assertThat(defaultSettings.proxySettings, is(ProxySettings.NO_PROXY_SETTINGS)); assertThat(defaultSettings.readTimeoutMillis, is(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT)); assertThat(defaultSettings.maxRetries, is(ClientConfiguration.DEFAULT_RETRY_POLICY.getMaxErrorRetry())); assertThat(defaultSettings.throttleRetries, is(ClientConfiguration.DEFAULT_THROTTLE_RETRIES)); @@ -215,4 +216,77 @@ public void testSignerOverrideCanBeSet() { ClientConfiguration configuration = S3Service.buildConfiguration(settings.get("other")); assertThat(configuration.getSignerOverride(), is(signerOverride)); } + + public void testSetProxySettings() throws Exception { + final int port = randomIntBetween(10, 1080); + final 
String userName = randomAlphaOfLength(10); + final String password = randomAlphaOfLength(10); + final String proxyType = randomFrom("http", "https", "socks"); + + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.proxy.username", userName); + secureSettings.setString("s3.client.default.proxy.password", password); + + final Settings settings = Settings.builder() + .put("s3.client.default.proxy.type", proxyType) + .put("s3.client.default.proxy.host", randomFrom("127.0.0.10")) + .put("s3.client.default.proxy.port", randomFrom(port)) + .setSecureSettings(secureSettings) + .build(); + + final S3ClientSettings s3ClientSettings = S3ClientSettings.load(settings).get("default"); + + assertEquals(ProxySettings.ProxyType.valueOf(proxyType.toUpperCase(Locale.ROOT)), s3ClientSettings.proxySettings.getType()); + assertEquals(new InetSocketAddress(InetAddress.getByName("127.0.0.10"), port), s3ClientSettings.proxySettings.getAddress()); + assertEquals(userName, s3ClientSettings.proxySettings.getUsername()); + assertEquals(password, s3ClientSettings.proxySettings.getPassword()); + } + + public void testProxyWrongHost() { + final Settings settings = Settings.builder() + .put("s3.client.default.proxy.type", randomFrom("socks", "http")) + .put("s3.client.default.proxy.host", "thisisnotavalidhostorwehavebeensuperunlucky") + .put("s3.client.default.proxy.port", 8080) + .build(); + final SettingsException e = expectThrows(SettingsException.class, () -> S3ClientSettings.load(settings)); + assertEquals("S3 proxy host is unknown.", e.getMessage()); + } + + public void testProxyTypeNotSet() { + final Settings hostPortSettings = Settings.builder() + .put("s3.client.default.proxy.host", "127.0.0.1") + .put("s3.client.default.proxy.port", 8080) + .build(); + + SettingsException e = expectThrows(SettingsException.class, () -> S3ClientSettings.load(hostPortSettings)); + assertEquals("S3 proxy port or host or username or password have been set but proxy type is not defined.", e.getMessage()); + + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.proxy.username", "aaaa"); + secureSettings.setString("s3.client.default.proxy.password", "bbbb"); + final Settings usernamePasswordSettings = Settings.builder().setSecureSettings(secureSettings).build(); + + e = expectThrows(SettingsException.class, () -> S3ClientSettings.load(usernamePasswordSettings)); + assertEquals("S3 proxy port or host or username or password have been set but proxy type is not defined.", e.getMessage()); + } + + public void testProxyHostNotSet() { + final Settings settings = Settings.builder() + .put("s3.client.default.proxy.port", 8080) + .put("s3.client.default.proxy.type", randomFrom("socks", "http", "https")) + .build(); + final SettingsException e = expectThrows(SettingsException.class, () -> S3ClientSettings.load(settings)); + assertEquals("S3 proxy type has been set but proxy host or port is not defined.", e.getMessage()); + } + + public void testSocksDoesNotSupportForHttpProtocol() { + final Settings settings = Settings.builder() + .put("s3.client.default.proxy.host", "127.0.0.1") + .put("s3.client.default.proxy.port", 8080) + .put("s3.client.default.protocol", "http") + .put("s3.client.default.proxy.type", "socks") + .build(); + expectThrows(SettingsException.class, () -> S3ClientSettings.load(settings)); + } + } diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java 
b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java index c7d1cb43bd266..0f40a7b3392e8 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java @@ -109,7 +109,7 @@ private S3RetryingInputStream createInputStream(final byte[] data, @Nullable fin final AmazonS3 client = mock(AmazonS3.class); when(client.getObject(any(GetObjectRequest.class))).thenReturn(s3Object); final AmazonS3Reference clientReference = mock(AmazonS3Reference.class); - when(clientReference.client()).thenReturn(client); + when(clientReference.get()).thenReturn(client); final S3BlobStore blobStore = mock(S3BlobStore.class); when(blobStore.clientReference()).thenReturn(clientReference); diff --git a/qa/evil-tests/build.gradle b/qa/evil-tests/build.gradle index 691115864de16..19dc72c0c784f 100644 --- a/qa/evil-tests/build.gradle +++ b/qa/evil-tests/build.gradle @@ -40,7 +40,7 @@ apply plugin: 'opensearch.testclusters' apply plugin: 'opensearch.standalone-test' dependencies { - testImplementation 'com.google.jimfs:jimfs:1.1' + testImplementation 'com.google.jimfs:jimfs:1.2' } // TODO: give each evil test its own fresh JVM for more isolation. @@ -59,8 +59,17 @@ thirdPartyAudit { 'com.google.common.cache.Striped64', 'com.google.common.cache.Striped64$1', 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'com.google.common.hash.Striped64', + 'com.google.common.hash.Striped64$1', + 'com.google.common.hash.Striped64$Cell', 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1' + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1' ) } diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java index 629e325427162..a67c5581cba92 100644 --- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java @@ -1335,45 +1335,6 @@ public void testTurnOffTranslogRetentionAfterUpgraded() throws Exception { } } - public void testRecoveryWithTranslogRetentionDisabled() throws Exception { - if (isRunningAgainstOldCluster()) { - final Settings.Builder settings = Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1); - if (minimumNodeVersion().before(Version.V_2_0_0)) { - settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); - } - if (randomBoolean()) { - settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1"); - } - if (randomBoolean()) { - settings.put(IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING.getKey(), "1kb"); - } - createIndex(index, settings.build()); - 
ensureGreen(index); - int numDocs = randomIntBetween(0, 100); - for (int i = 0; i < numDocs; i++) { - indexDocument(Integer.toString(i)); - if (rarely()) { - flush(index, randomBoolean()); - } - } - client().performRequest(new Request("POST", "/" + index + "/_refresh")); - if (randomBoolean()) { - ensurePeerRecoveryRetentionLeasesRenewedAndSynced(index); - } - if (randomBoolean()) { - flush(index, randomBoolean()); - } else if (randomBoolean()) { - syncedFlush(index, randomBoolean()); - } - saveInfoDocument("doc_count", Integer.toString(numDocs)); - } - ensureGreen(index); - final int numDocs = Integer.parseInt(loadInfoDocument("doc_count")); - assertTotalHits(numDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")))); - } - public void testResize() throws Exception { int numDocs; if (isRunningAgainstOldCluster()) { diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml index 1956cd56e6850..f83c098e05741 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml @@ -78,5 +78,4 @@ - do: indices.get: index: queries - include_type_name: false - match: { queries.mappings.properties.id.type: "keyword" } diff --git a/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java b/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java index e3503e2f065b1..a13d406f7b133 100644 --- a/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java +++ b/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java @@ -226,7 +226,7 @@ private static void indexTestData() { // Make sure we have a few segments BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); for (int j = 0; j < 20; j++) { - bulkRequestBuilder.add(client().prepareIndex("test", "_doc", Integer.toString(i * 5 + j)).setSource("field", "value")); + bulkRequestBuilder.add(client().prepareIndex("test").setId(Integer.toString(i * 5 + j)).setSource("field", "value")); } assertNoFailures(bulkRequestBuilder.get()); } diff --git a/qa/translog-policy/build.gradle b/qa/translog-policy/build.gradle deleted file mode 100644 index 5ef7774045e16..0000000000000 --- a/qa/translog-policy/build.gradle +++ /dev/null @@ -1,117 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import org.opensearch.gradle.Version -import org.opensearch.gradle.info.BuildParams -import org.opensearch.gradle.testclusters.StandaloneRestIntegTestTask - -apply plugin: 'opensearch.testclusters' -apply plugin: 'opensearch.standalone-test' -apply from : "$rootDir/gradle/bwc-test.gradle" - -for (Version bwcVersion : BuildParams.bwcVersions.indexCompatible) { - if (bwcVersion.before('6.3.0')) { - // explicitly running restart on the current node does not work in step 2 - // below when plugins are installed, which is the case for some plugins - // prior to 6.3.0 - continue - } - String baseName = "v${bwcVersion}" - - testClusters { - "${baseName}" { - versions = [bwcVersion.toString(), project.version] - numberOfNodes = 2 - setting 'http.content_type.required', 'true' - } - } - - tasks.register("${baseName}#Step1OldClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - mustRunAfter(precommit) - systemProperty 'tests.test_step', 'step1' - systemProperty 'tests.is_old_cluster', 'true' - } - - tasks.register("${baseName}#Step2OldClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - dependsOn "${baseName}#Step1OldClusterTest" - doFirst { - testClusters."${baseName}".fullRestart() - } - systemProperty 'tests.test_step', 'step2' - systemProperty 'tests.is_old_cluster', 'true' - } - - tasks.register("${baseName}#Step3NewClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - dependsOn "${baseName}#Step2OldClusterTest" - doFirst { - testClusters."${baseName}".goToNextVersion() - } - systemProperty 'tests.test_step', 'step3' - systemProperty 'tests.is_old_cluster', 'false' - } - - tasks.register("${baseName}#Step4NewClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - dependsOn "${baseName}#Step3NewClusterTest" - doFirst { - testClusters."${baseName}".fullRestart() - } - systemProperty 'tests.test_step', 'step4' - systemProperty 'tests.is_old_cluster', 'false' - } - - String oldVersion = bwcVersion.toString().minus("-SNAPSHOT") - tasks.matching { it.name.startsWith(baseName) && it.name.endsWith("ClusterTest") }.configureEach { - it.systemProperty 'tests.old_cluster_version', oldVersion - it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}") - it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}") - } - - tasks.register(bwcTaskName(bwcVersion)) { - dependsOn tasks.named("${baseName}#Step4NewClusterTest") - } -} - -configurations { - testArtifacts.extendsFrom testRuntime -} - -task testJar(type: Jar) { - archiveAppendix = 'test' - from sourceSets.test.output -} - -artifacts { - testArtifacts testJar -} diff --git a/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java b/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java index 5ae9944429d21..0dc62b160ff3f 100644 --- a/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java +++ b/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java @@ -98,6 +98,7 @@ public void setType() { type = "_doc"; } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/pull/2225") public void testEmptyIndex() throws Exception { if (TEST_STEP == TestStep.STEP1_OLD_CLUSTER) { final Settings.Builder 
settings = Settings.builder() @@ -113,6 +114,7 @@ public void testEmptyIndex() throws Exception { assertTotalHits(0, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")))); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/pull/2225") public void testRecoverReplica() throws Exception { int numDocs = 100; if (TEST_STEP == TestStep.STEP1_OLD_CLUSTER) { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json index 9f651b17ea1b2..4c32974583aac 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -18,25 +18,6 @@ "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" } } - }, - { - "path" : "/{index}/{type}/_delete_by_query", - "methods": ["POST"], - "parts": { - "index": { - "required": true, - "type": "list", - "description": "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type": "list", - "description": "A comma-separated list of document types to search; leave empty to perform the operation on all types" - } - }, - "deprecated": { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json index 2b9e8617a661c..922183d628ac6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json @@ -22,10 +22,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be expected in the body of the mappings." - }, "wait_for_active_shards":{ "type":"string", "description":"Set the number of active shards to wait for before the operation returns." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json index f78b410f5b489..90a1274ecb059 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json @@ -22,10 +22,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether to add the type name to the response (default: false)" - }, "local":{ "type":"boolean", "description":"Return local information, do not retrieve the state from master node (default: false)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json index ee96dfcc21ccd..0e71b6d395777 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json @@ -38,10 +38,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be returned in the body of the mappings." 
- }, "include_defaults":{ "type":"boolean", "description":"Whether the default mapping values should be returned as well" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json index 9e07ae663ff8f..337016763ad0a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json @@ -28,10 +28,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be returned in the body of the mappings." - }, "flat_settings":{ "type":"boolean", "description":"Return settings in flat format (default: false)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json index 701a722d89eb8..75a328af929ef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json @@ -23,10 +23,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be returned in the body of the mappings." - }, "order":{ "type":"number", "description":"The order for this template when merging multiple matching ones (higher numbers are merged later, overriding the lower numbers)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index 4ed1f9b490969..fef1f03d1c9a7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -38,10 +38,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be included in the body of the mappings." 
- }, "timeout":{ "type":"time", "description":"Explicit operation timeout" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json index e3e6ef57e42c8..3a3a6ebe1bff5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json @@ -15,34 +15,16 @@ ] }, { - "path":"/{index}/_msearch", - "methods":[ + "path": "/{index}/_msearch", + "methods": [ "GET", "POST" ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names to use as default" - } - } - }, - { - "path" : "/{index}/{type}/_msearch", - "methods": ["GET", "POST"], "parts": { "index": { - "type" : "list", - "description" : "A comma-separated list of index names to use as default" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to use as default" + "type": "list", + "description": "A comma-separated list of index names to use as default" } - }, - "deprecated": { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" } } ] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json index 8eb300c975932..7ac194f91bf56 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json @@ -26,24 +26,6 @@ "description":"A comma-separated list of index names to use as default" } } - }, - { - "path" : "/{index}/{type}/_msearch/template", - "methods": ["GET", "POST"], - "parts": { - "index": { - "type" : "list", - "description" : "A comma-separated list of index names to use as default" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to use as default" - } - }, - "deprecated": { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json b/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json index 93dee177e8026..d5fc7371e0898 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json @@ -26,25 +26,6 @@ "description":"The index in which the document resides." } } - }, - { - "path" : "/{index}/{type}/_mtermvectors", - "methods" : ["GET", "POST"], - "parts" : { - "index" : { - "type" : "string", - "description" : "The index in which the document resides." - }, - "type" : { - "type" : "string", - "description" : "The type of the document." 
- } - }, - "deprecated":{ - "version" : "7.0.0", - - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index 7770acc52f978..ac321acf8907b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -26,24 +26,6 @@ "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" } } - }, - { - "path" : "/{index}/{type}/_search", - "methods": ["GET", "POST"], - "parts": { - "index": { - "type" : "list", - "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type": "list", - "description": "A comma-separated list of document types to search; leave empty to perform the operation on all types" - } - }, - "deprecated" : { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json index 00bd09729c908..4230b660523b8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json @@ -26,24 +26,6 @@ "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" } } - }, - { - "path" : "/{index}/{type}/_search/template", - "methods": ["GET", "POST"], - "parts": { - "index": { - "type" : "list", - "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to search; leave empty to perform the operation on all types" - } - }, - "deprecated" : { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json b/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json index dd7fac97d79a7..b6cb3663c2df2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json @@ -36,50 +36,6 @@ "description":"The index in which the document resides." } } - }, - { - "path" : "/{index}/{type}/{id}/_termvectors", - "methods" : ["GET", "POST"], - "parts" : { - "index" : { - "type" : "string", - "description" : "The index in which the document resides.", - "required" : true - }, - "type" : { - "type" : "string", - "description" : "The type of the document.", - "required" : false - }, - "id" : { - "type" : "string", - "description" : "The id of the document, when not specified a doc param should be supplied." 
- } - }, - "deprecated": { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } - }, - { - "path" : "/{index}/{type}/_termvectors", - "methods" : ["GET", "POST"], - "parts" : { - "index" : { - "type" : "string", - "description" : "The index in which the document resides.", - "required" : true - }, - "type" : { - "type" : "string", - "description" : "The type of the document.", - "required" : false - } - }, - "deprecated": { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json index 930f1a9700076..71a0c1fc8ad95 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json @@ -18,25 +18,6 @@ "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" } } - }, - { - "path" : "/{index}/{type}/_update_by_query", - "methods": ["POST"], - "parts": { - "index": { - "required": true, - "type": "list", - "description": "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type": "list", - "description": "A comma-separated list of document types to search; leave empty to perform the operation on all types" - } - }, - "deprecated" : { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml index f1ae5c89e52a5..45e9a969c5982 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml @@ -151,7 +151,6 @@ setup: "Get date_nanos field caps": - do: indices.create: - include_type_name: false index: test_nanos body: mappings: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml index ce8a6604069ed..0f8c7a7a68f07 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml @@ -112,20 +112,3 @@ properties: "": type: keyword - ---- -"Create index with explicit _doc type": - - do: - catch: bad_request - indices.create: - index: test_index - body: - mappings: - _doc: - properties: - field: - type: keyword - - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." 
} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml deleted file mode 100644 index 85267f49b1317..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml +++ /dev/null @@ -1,70 +0,0 @@ ---- -setup: - - - do: - indices.create: - include_type_name: true - index: test_index - body: - aliases: - test_alias: {} - test_blias: {} - mappings: - type_1: {} - settings: - number_of_shards: 1 - number_of_replicas: 1 - - - do: - indices.create: - index: test_index_2 - body: - settings: - number_of_shards: 1 - number_of_replicas: 2 - aliases: - test_alias: {} - test_blias: {} - - - do: - indices.create: - index: test_index_3 - body: - aliases: - test_alias: {} - test_blias: {} - - - do: - indices.close: - index: test_index_3 - - - do: - cluster.health: - wait_for_status: yellow - ---- -"Test include_type_name": - - do: - indices.get: - include_type_name: true - index: test_index - - - is_true: test_index.mappings - - is_true: test_index.mappings.type_1 - - - do: - indices.get: - include_type_name: false - index: test_index - - - is_true: test_index.mappings - - is_false: test_index.mappings.type_1 - ---- -"Test include_type_name dafaults to false": - - do: - indices.get: - index: test_index - - - is_true: test_index.mappings - - is_false: test_index.mappings.type_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml deleted file mode 100644 index 0ecf304b1ce70..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml +++ /dev/null @@ -1,48 +0,0 @@ -setup: - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: test-* - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - _doc: - properties: - field: - type: keyword - ---- -"Get template": - - - do: - indices.get_template: - include_type_name: true - name: test - - - match: {test.index_patterns: ["test-*"]} - - match: {test.settings: {index: {number_of_shards: '1', number_of_replicas: '0'}}} - - match: {test.mappings: {_doc: {properties: {field: {type: keyword}}}}} - ---- -"Get template with no mappings": - - - do: - indices.put_template: - name: test_no_mappings - body: - index_patterns: test-* - settings: - number_of_shards: 1 - number_of_replicas: 0 - - - do: - indices.get_template: - include_type_name: true - name: test_no_mappings - - - match: {test_no_mappings.index_patterns: ["test-*"]} - - match: {test_no_mappings.settings: {index: {number_of_shards: '1', number_of_replicas: '0'}}} - - match: {test_no_mappings.mappings: {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml index 0b4e34d2740b5..5b40ad0771c70 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml @@ -226,20 +226,3 @@ indices.put_template: name: test body: {} - ---- -"Put template with explicit _doc type": - - do: - catch: bad_request - indices.put_template: - name: test - body: - index_patterns: test-* - mappings: - _doc: - properties: - field: - type: 
keyword - - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml deleted file mode 100644 index fde28db3c691d..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml +++ /dev/null @@ -1,74 +0,0 @@ ---- -"Put template": - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: test-* - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - _doc: - properties: - field: - type: keyword - - - do: - indices.get_template: - include_type_name: true - name: test - flat_settings: true - - - match: {test.index_patterns: ["test-*"]} - - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} - - match: {test.mappings: {_doc: {properties: {field: {type: keyword}}}}} - ---- -"Put multiple template": - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: [test-*, test2-*] - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - _doc: - properties: - field: - type: text - - - do: - indices.get_template: - include_type_name: true - name: test - flat_settings: true - - - match: {test.index_patterns: ["test-*", "test2-*"]} - - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} - - match: {test.mappings: {_doc: {properties: {field: {type: text}}}}} - ---- -"Put template with empty mappings": - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: test-* - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: {} - - - do: - indices.get_template: - include_type_name: true - name: test - flat_settings: true - - - match: {test.mappings: {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml index 040ffd534c0ab..b669c2ab75176 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml @@ -37,28 +37,3 @@ - match: { conditions: { "[max_docs: 2]": true } } - match: { rolled_over: true } - ---- -"Mappings with explicit _doc type": - - do: - indices.create: - index: logs-1 - body: - aliases: - logs_search: {} - - - do: - catch: bad_request - indices.rollover: - alias: "logs_search" - body: - conditions: - max_docs: 2 - mappings: - _doc: - properties: - field: - type: keyword - - - match: { error.caused_by.type: "illegal_argument_exception" } - - match: { error.caused_by.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." 
} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml deleted file mode 100644 index b14b5f94ebbc2..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml +++ /dev/null @@ -1,33 +0,0 @@ ---- -"mtermvectors without types on an index that has types": - - - skip: - version: " - 6.99.99" - reason: Typeless APIs were introduced in 7.0.0 - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type : "text" - term_vector : "with_positions_offsets" - - - do: - index: - index: index - id: 1 - body: { foo: bar } - - - do: - mtermvectors: - body: - docs: - - _index: index - _id: 1 - - - match: {docs.0.term_vectors.foo.terms.bar.term_freq: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml index 869214f9111c6..7c7a223044725 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml @@ -820,6 +820,10 @@ setup: { "str": "cow", "number": 1 } { "index": {} } { "str": "pig", "number": 1 } + - do: + indices.forcemerge: + index: test_1 + max_num_segments: 1 - do: search: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml index 534e552fc0ea2..1368c87a77d7e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml @@ -1,7 +1,6 @@ setup: - do: indices.create: - include_type_name: false index: test_1 body: settings: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml index 2db498a0cacf0..dfd5b6c5f2583 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml @@ -4,7 +4,6 @@ setup: reason: "added in 7.0.0" - do: indices.create: - include_type_name: false index: test_1 body: settings: diff --git a/server/build.gradle b/server/build.gradle index aa467cd0528bf..3a11428ca7919 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -64,14 +64,14 @@ if (!isEclipse) { } compileJava11Java { - sourceCompatibility = 11 - targetCompatibility = 11 + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 } tasks.named('forbiddenApisJava11').configure { doFirst { if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) { - targetCompatibility = JavaVersion.VERSION_11.getMajorVersion() + targetCompatibility = JavaVersion.VERSION_11 } } } diff --git a/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java index eeee000fa9c2d..17366cf0d08fc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java 
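The test churn in the hunks that follow (IndicesRequestIT and the other integration tests) applies one mechanical pattern: the typed `prepareIndex(index, type, id)` calls are replaced by their typeless equivalents. A minimal sketch of that pattern, assuming an OpenSearch `Client` instance named `client`; the class name, index name, and id below are illustrative and not taken from the patch:

```java
import org.opensearch.action.index.IndexResponse;
import org.opensearch.client.Client;

final class TypelessIndexingSketch {
    private TypelessIndexingSketch() {}

    // Old style (removed throughout this patch):
    //   client.prepareIndex("test", "type", "1").setSource("field", "value").get();
    // New style: the mapping type is dropped and the document id is set on the builder.
    static IndexResponse indexOne(Client client) {
        return client.prepareIndex("test")   // index name only, no mapping type
            .setId("1")                       // id moves to setId(...)
            .setSource("field", "value")      // source is unchanged
            .get();
    }
}
```

Auto-generated ids follow the same pattern with `setId(...)` simply omitted, which is what several of the test changes later in this patch do.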
+++ b/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java @@ -258,7 +258,7 @@ public void testUpdate() { interceptTransportActions(updateShardActions); String indexOrAlias = randomIndexOrAlias(); - client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get(); + client().prepareIndex(indexOrAlias).setId("id").setSource("field", "value").get(); UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "id").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value1"); UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet(); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); @@ -288,7 +288,7 @@ public void testUpdateDelete() { interceptTransportActions(updateShardActions); String indexOrAlias = randomIndexOrAlias(); - client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get(); + client().prepareIndex(indexOrAlias).setId("id").setSource("field", "value").get(); UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "id").script( new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx.op='delete'", Collections.emptyMap()) ); @@ -545,8 +545,7 @@ public void testGetMappings() { public void testPutMapping() { interceptTransportActions(PutMappingAction.NAME); - PutMappingRequest putMappingRequest = new PutMappingRequest(randomUniqueIndicesOrAliases()).type("type") - .source("field", "type=text"); + PutMappingRequest putMappingRequest = new PutMappingRequest(randomUniqueIndicesOrAliases()).source("field", "type=text"); internalCluster().coordOnlyNodeClient().admin().indices().putMapping(putMappingRequest).actionGet(); clearInterceptedActions(); @@ -584,7 +583,7 @@ public void testSearchQueryThenFetch() throws Exception { String[] randomIndicesOrAliases = randomIndicesOrAliases(); for (int i = 0; i < randomIndicesOrAliases.length; i++) { - client().prepareIndex(randomIndicesOrAliases[i], "type", "id-" + i).setSource("field", "value").get(); + client().prepareIndex(randomIndicesOrAliases[i]).setId("id-" + i).setSource("field", "value").get(); } refresh(); @@ -609,7 +608,7 @@ public void testSearchDfsQueryThenFetch() throws Exception { String[] randomIndicesOrAliases = randomIndicesOrAliases(); for (int i = 0; i < randomIndicesOrAliases.length; i++) { - client().prepareIndex(randomIndicesOrAliases[i], "type", "id-" + i).setSource("field", "value").get(); + client().prepareIndex(randomIndicesOrAliases[i]).setId("id-" + i).setSource("field", "value").get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/RejectionActionIT.java b/server/src/internalClusterTest/java/org/opensearch/action/RejectionActionIT.java index e94167fb71552..f930b9e9cfda0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/RejectionActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/RejectionActionIT.java @@ -69,7 +69,7 @@ protected Settings nodeSettings(int nodeOrdinal) { public void testSimulatedSearchRejectionLoad() throws Throwable { for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "1").get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "1").get(); } int numberOfAsyncOps = randomIntBetween(200, 700); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/HotThreadsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/HotThreadsIT.java index 
24d389d8ea03e..ab44c95b4f5a6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/HotThreadsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/HotThreadsIT.java @@ -128,9 +128,9 @@ public void onFailure(Exception e) { indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value2"), - client().prepareIndex("test", "type1", "3").setSource("field1", "value3") + client().prepareIndex("test").setId("1").setSource("field1", "value1"), + client().prepareIndex("test").setId("2").setSource("field1", "value2"), + client().prepareIndex("test").setId("3").setSource("field1", "value3") ); ensureSearchable(); while (latch.getCount() > 0) { diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java index 9c3cf4c28e55e..e1346492999be 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java @@ -318,10 +318,8 @@ public void testTransportBulkTasks() { createIndex("test"); ensureGreen("test"); // Make sure all shards are allocated to catch replication tasks // ensures the mapping is available on all nodes so we won't retry the request (in case replicas don't have the right mapping). - client().admin().indices().preparePutMapping("test").setType("doc").setSource("foo", "type=keyword").get(); - client().prepareBulk() - .add(client().prepareIndex("test", "doc", "test_id").setSource("{\"foo\": \"bar\"}", XContentType.JSON)) - .get(); + client().admin().indices().preparePutMapping("test").setSource("foo", "type=keyword").get(); + client().prepareBulk().add(client().prepareIndex("test").setId("test_id").setSource("{\"foo\": \"bar\"}", XContentType.JSON)).get(); // the bulk operation should produce one main task List topTask = findEvents(BulkAction.NAME, Tuple::v1); @@ -370,7 +368,8 @@ public void testSearchTaskDescriptions() { registerTaskManagerListeners(SearchAction.NAME + "[*]"); // shard task createIndex("test"); ensureGreen("test"); // Make sure all shards are allocated to catch replication tasks - client().prepareIndex("test", "doc", "test_id") + client().prepareIndex("test") + .setId("test_id") .setSource("{\"foo\": \"bar\"}", XContentType.JSON) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); @@ -475,7 +474,7 @@ public void waitForTaskCompletion(Task task) {} } // Need to run the task in a separate thread because node client's .execute() is blocked by our task listener index = new Thread(() -> { - IndexResponse indexResponse = client().prepareIndex("test", "test").setSource("test", "test").get(); + IndexResponse indexResponse = client().prepareIndex("test").setSource("test", "test").get(); assertArrayEquals(ReplicationResponse.EMPTY, indexResponse.getShardInfo().getFailures()); }); index.start(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java index cc855696d5f8e..1731c607a066d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java @@ -73,11 +73,11 @@ protected void setUpRepository() throws Exception { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex(INDEX_NAME, "type").setSource("test", "init").execute().actionGet(); + client().prepareIndex(INDEX_NAME).setSource("test", "init").execute().actionGet(); } docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex(OTHER_INDEX_NAME, "type").setSource("test", "init").execute().actionGet(); + client().prepareIndex(OTHER_INDEX_NAME).setSource("test", "init").execute().actionGet(); } logger.info("--> register a repository"); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CloneIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CloneIndexIT.java index 11a3c6708ec93..98fc6483703c4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CloneIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CloneIndexIT.java @@ -62,7 +62,7 @@ public void testCreateCloneIndex() { ).get(); final int docs = randomIntBetween(0, 128); for (int i = 0; i < docs; i++) { - client().prepareIndex("source", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } internalCluster().ensureAtLeastNumDataNodes(2); // ensure all shards are allocated otherwise the ensure green below might not succeed since we require the merge node @@ -122,7 +122,7 @@ public void testCreateCloneIndex() { } for (int i = docs; i < 2 * docs; i++) { - client().prepareIndex("target", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); assertHitCount( diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java index cee5fd15d54c2..311767d82ac6c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java @@ -148,12 +148,9 @@ public void testNonNestedMappings() throws Exception { GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); - ImmutableOpenMap mappings = response.mappings().get("test"); + MappingMetadata mappings = response.mappings().get("test"); assertNotNull(mappings); - - MappingMetadata metadata = mappings.get("_doc"); - assertNotNull(metadata); - assertFalse(metadata.sourceAsMap().isEmpty()); + assertFalse(mappings.sourceAsMap().isEmpty()); } public void testEmptyNestedMappings() throws Exception { @@ -161,12 +158,10 @@ public void testEmptyNestedMappings() throws Exception { GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); - ImmutableOpenMap mappings = response.mappings().get("test"); + MappingMetadata mappings = response.mappings().get("test"); assertNotNull(mappings); - MappingMetadata metadata = mappings.get("_doc"); - assertNotNull(metadata); - assertTrue(metadata.sourceAsMap().isEmpty()); + 
assertTrue(mappings.sourceAsMap().isEmpty()); } public void testMappingParamAndNestedMismatch() throws Exception { @@ -190,12 +185,9 @@ public void testEmptyMappings() throws Exception { GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); - ImmutableOpenMap mappings = response.mappings().get("test"); + MappingMetadata mappings = response.mappings().get("test"); assertNotNull(mappings); - - MappingMetadata metadata = mappings.get("_doc"); - assertNotNull(metadata); - assertTrue(metadata.sourceAsMap().isEmpty()); + assertTrue(mappings.sourceAsMap().isEmpty()); } public void testInvalidShardCountSettings() throws Exception { @@ -276,7 +268,7 @@ public void testCreateAndDeleteIndexConcurrently() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "test").setSource("index_version", indexVersion.get()).get(); + client().prepareIndex("test").setSource("index_version", indexVersion.get()).get(); } synchronized (indexVersionLock) { // not necessarily needed here but for completeness we lock here too indexVersion.incrementAndGet(); @@ -289,7 +281,7 @@ public void onResponse(AcknowledgedResponse deleteIndexResponse) { public void run() { try { // recreate that index - client().prepareIndex("test", "test").setSource("index_version", indexVersion.get()).get(); + client().prepareIndex("test").setSource("index_version", indexVersion.get()).get(); synchronized (indexVersionLock) { // we sync here since we have to ensure that all indexing operations below for a given ID are done before // we increment the index version otherwise a doc that is in-flight could make it into an index that it @@ -315,7 +307,7 @@ public void onFailure(Exception e) { for (int i = 0; i < numDocs; i++) { try { synchronized (indexVersionLock) { - client().prepareIndex("test", "test") + client().prepareIndex("test") .setSource("index_version", indexVersion.get()) .setTimeout(TimeValue.timeValueSeconds(10)) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java index a1ddc4a27a1f9..ef5c56c50ed83 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java @@ -71,6 +71,7 @@ import org.opensearch.index.Index; import org.opensearch.index.IndexService; import org.opensearch.index.engine.SegmentsStats; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.shard.IndexShard; @@ -107,7 +108,8 @@ public void testCreateShrinkIndexToN() { internalCluster().ensureAtLeastNumDataNodes(2); prepareCreate("source").setSettings(Settings.builder().put(indexSettings()).put("number_of_shards", shardSplits[0])).get(); for (int i = 0; i < 20; i++) { - client().prepareIndex("source", "t1", Integer.toString(i)) + client().prepareIndex("source") + .setId(Integer.toString(i)) .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON) .get(); } @@ -150,7 +152,8 @@ public void testCreateShrinkIndexToN() { assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20); for (int i = 0; 
i < 20; i++) { // now update - client().prepareIndex("first_shrink", "t1", Integer.toString(i)) + client().prepareIndex("first_shrink") + .setId(Integer.toString(i)) .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON) .get(); } @@ -192,7 +195,8 @@ public void testCreateShrinkIndexToN() { assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20); for (int i = 0; i < 20; i++) { // now update - client().prepareIndex("second_shrink", "t1", Integer.toString(i)) + client().prepareIndex("second_shrink") + .setId(Integer.toString(i)) .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON) .get(); } @@ -292,7 +296,7 @@ public void testCreateShrinkIndex() { ).get(); final int docs = randomIntBetween(0, 128); for (int i = 0; i < docs; i++) { - client().prepareIndex("source", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } ImmutableOpenMap dataNodes = client().admin() .cluster() @@ -393,7 +397,7 @@ public void testCreateShrinkIndex() { } for (int i = docs; i < 2 * docs; i++) { - client().prepareIndex("target", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); assertHitCount(client().prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 2 * docs); @@ -420,7 +424,7 @@ public void testCreateShrinkIndexFails() throws Exception { Settings.builder().put(indexSettings()).put("number_of_shards", randomIntBetween(2, 7)).put("number_of_replicas", 0) ).get(); for (int i = 0; i < 20; i++) { - client().prepareIndex("source", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } ImmutableOpenMap dataNodes = client().admin() .cluster() @@ -523,9 +527,10 @@ public void testCreateShrinkWithIndexSort() throws Exception { .put("sort.order", "desc") .put("number_of_shards", 8) .put("number_of_replicas", 0) - ).addMapping("type", "id", "type=keyword,doc_values=true").get(); + ).addMapping(MapperService.SINGLE_MAPPING_NAME, "id", "type=keyword,doc_values=true").get(); for (int i = 0; i < 20; i++) { - client().prepareIndex("source", "type", Integer.toString(i)) + client().prepareIndex("source") + .setId(Integer.toString(i)) .setSource("{\"foo\" : \"bar\", \"id\" : " + i + "}", XContentType.JSON) .get(); } @@ -595,7 +600,7 @@ public void testCreateShrinkWithIndexSort() throws Exception { // ... 
and that the index sort is also applied to updates for (int i = 20; i < 40; i++) { - client().prepareIndex("target", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); assertSortedSegments("target", expectedIndexSort); @@ -606,7 +611,7 @@ public void testShrinkCommitsMergeOnIdle() throws Exception { Settings.builder().put(indexSettings()).put("index.number_of_replicas", 0).put("number_of_shards", 5) ).get(); for (int i = 0; i < 30; i++) { - client().prepareIndex("source", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } client().admin().indices().prepareFlush("source").get(); ImmutableOpenMap dataNodes = client().admin() diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java index 14d337c34daa5..42b1d5f4a757f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java @@ -65,6 +65,7 @@ import org.opensearch.index.Index; import org.opensearch.index.IndexService; import org.opensearch.index.engine.SegmentsStats; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.shard.IndexShard; @@ -135,12 +136,12 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha int numRoutingShards = MetadataCreateIndexService.calculateNumRoutingShards(secondSplitShards, Version.CURRENT) - 1; settings.put("index.routing_partition_size", randomIntBetween(1, numRoutingShards)); if (useNested) { - createInitialIndex.addMapping("t1", "_routing", "required=true", "nested1", "type=nested"); + createInitialIndex.addMapping(MapperService.SINGLE_MAPPING_NAME, "_routing", "required=true", "nested1", "type=nested"); } else { - createInitialIndex.addMapping("t1", "_routing", "required=true"); + createInitialIndex.addMapping(MapperService.SINGLE_MAPPING_NAME, "_routing", "required=true"); } } else if (useNested) { - createInitialIndex.addMapping("t1", "nested1", "type=nested"); + createInitialIndex.addMapping(MapperService.SINGLE_MAPPING_NAME, "nested1", "type=nested"); } logger.info("use routing {} use mixed routing {} use nested {}", useRouting, useMixedRouting, useNested); createInitialIndex.setSettings(settings).get(); @@ -150,7 +151,8 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha BiFunction indexFunc = (index, id) -> { try { - return client().prepareIndex(index, "t1", Integer.toString(id)) + return client().prepareIndex(index) + .setId(Integer.toString(id)) .setSource( jsonBuilder().startObject() .field("foo", "bar") @@ -402,7 +404,7 @@ public void testCreateSplitIndex() throws Exception { ).get(); final int docs = randomIntBetween(0, 128); for (int i = 0; i < docs; i++) { - client().prepareIndex("source", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } // ensure all 
shards are allocated otherwise the ensure green below might not succeed since we require the merge node // if we change the setting too quickly we will end up with one replica unassigned which can't be assigned anymore due @@ -486,7 +488,7 @@ public void testCreateSplitIndex() throws Exception { } for (int i = docs; i < 2 * docs; i++) { - client().prepareIndex("target", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); assertHitCount( @@ -521,9 +523,10 @@ public void testCreateSplitWithIndexSort() throws Exception { .put("sort.order", "desc") .put("number_of_shards", 2) .put("number_of_replicas", 0) - ).addMapping("type", "id", "type=keyword,doc_values=true").get(); + ).addMapping(MapperService.SINGLE_MAPPING_NAME, "id", "type=keyword,doc_values=true").get(); for (int i = 0; i < 20; i++) { - client().prepareIndex("source", "type", Integer.toString(i)) + client().prepareIndex("source") + .setId(Integer.toString(i)) .setSource("{\"foo\" : \"bar\", \"id\" : " + i + "}", XContentType.JSON) .get(); } @@ -580,7 +583,7 @@ public void testCreateSplitWithIndexSort() throws Exception { // ... and that the index sort is also applied to updates for (int i = 20; i < 40; i++) { - client().prepareIndex("target", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); assertSortedSegments("target", expectedIndexSort); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/delete/DeleteIndexBlocksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/delete/DeleteIndexBlocksIT.java index 0dd4ff1ba863c..1ab5826329c8f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/delete/DeleteIndexBlocksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/delete/DeleteIndexBlocksIT.java @@ -58,14 +58,14 @@ public void testDeleteIndexWithBlocks() { public void testDeleteIndexOnIndexReadOnlyAllowDeleteSetting() { createIndex("test"); ensureGreen("test"); - client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar").get(); + client().prepareIndex().setIndex("test").setId("1").setSource("foo", "bar").get(); refresh(); try { Settings settings = Settings.builder().put(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE, true).build(); assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get()); assertSearchHits(client().prepareSearch().get(), "1"); assertBlocked( - client().prepareIndex().setIndex("test").setType("doc").setId("2").setSource("foo", "bar"), + client().prepareIndex().setIndex("test").setId("2").setSource("foo", "bar"), IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK ); assertBlocked( @@ -95,7 +95,7 @@ public void testClusterBlockMessageHasIndexName() { client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get(); ClusterBlockException e = expectThrows( ClusterBlockException.class, - () -> client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar").get() + () -> client().prepareIndex().setIndex("test").setId("1").setSource("foo", "bar").get() ); assertEquals( "index [test] blocked by: [TOO_MANY_REQUESTS/12/disk usage exceeded flood-stage watermark, " 
@@ -116,14 +116,14 @@ public void testClusterBlockMessageHasIndexName() { public void testDeleteIndexOnClusterReadOnlyAllowDeleteSetting() { createIndex("test"); ensureGreen("test"); - client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar").get(); + client().prepareIndex().setIndex("test").setId("1").setSource("foo", "bar").get(); refresh(); try { Settings settings = Settings.builder().put(Metadata.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), true).build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get()); assertSearchHits(client().prepareSearch().get(), "1"); assertBlocked( - client().prepareIndex().setIndex("test").setType("doc").setId("2").setSource("foo", "bar"), + client().prepareIndex().setIndex("test").setId("2").setSource("foo", "bar"), Metadata.CLUSTER_READ_ONLY_ALLOW_DELETE_BLOCK ); assertBlocked( diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/flush/FlushBlocksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/flush/FlushBlocksIT.java index 07fc8b9cac124..f780f505a6557 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/flush/FlushBlocksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/flush/FlushBlocksIT.java @@ -55,7 +55,7 @@ public void testFlushWithBlocks() { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex("test", "type", "" + i).setSource("test", "init").execute().actionGet(); + client().prepareIndex("test").setId("" + i).setSource("test", "init").execute().actionGet(); } // Request is not blocked diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java index 3a5de998c9f7b..b279623c46969 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java @@ -57,7 +57,7 @@ public void testForceMergeWithBlocks() { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex("test", "type", "" + i).setSource("test", "init").execute().actionGet(); + client().prepareIndex("test").setId("" + i).setSource("test", "init").execute().actionGet(); } // Request is not blocked diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java index a31976c969aaa..195817bf04cc9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java @@ -32,11 +32,13 @@ package org.opensearch.action.admin.indices.forcemerge; +import org.apache.lucene.index.IndexCommit; import org.opensearch.action.admin.indices.flush.FlushResponse; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.routing.IndexRoutingTable; import org.opensearch.cluster.routing.IndexShardRoutingTable; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; import 
org.opensearch.index.Index; import org.opensearch.index.engine.Engine; @@ -99,8 +101,8 @@ public void testForceMergeUUIDConsistent() throws IOException { } private static String getForceMergeUUID(IndexShard indexShard) throws IOException { - try (Engine.IndexCommitRef indexCommitRef = indexShard.acquireLastIndexCommit(true)) { - return indexCommitRef.getIndexCommit().getUserData().get(Engine.FORCE_MERGE_UUID_KEY); + try (GatedCloseable wrappedIndexCommit = indexShard.acquireLastIndexCommit(true)) { + return wrappedIndexCommit.get().getUserData().get(Engine.FORCE_MERGE_UUID_KEY); } } } diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java index ad6c9ecfb5663..bbe8b616ad87e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java @@ -35,6 +35,7 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.action.admin.indices.get.GetIndexRequest.Feature; +import org.opensearch.action.support.IndicesOptions; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; @@ -94,6 +95,19 @@ public void testSimpleUnknownIndex() { } } + public void testUnknownIndexWithAllowNoIndices() { + GetIndexResponse response = client().admin() + .indices() + .prepareGetIndex() + .addIndices("missing_idx") + .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) + .get(); + assertThat(response.indices(), notNullValue()); + assertThat(response.indices().length, equalTo(0)); + assertThat(response.mappings(), notNullValue()); + assertThat(response.mappings().size(), equalTo(0)); + } + public void testEmpty() { GetIndexResponse response = client().admin().indices().prepareGetIndex().addIndices("empty_idx").get(); String[] indices = response.indices(); @@ -263,24 +277,19 @@ private void assertNonEmptySettings(GetIndexResponse response, String indexName) } private void assertMappings(GetIndexResponse response, String indexName) { - ImmutableOpenMap> mappings = response.mappings(); + ImmutableOpenMap mappings = response.mappings(); assertThat(mappings, notNullValue()); assertThat(mappings.size(), equalTo(1)); - ImmutableOpenMap indexMappings = mappings.get(indexName); + MappingMetadata indexMappings = mappings.get(indexName); assertThat(indexMappings, notNullValue()); - assertThat(indexMappings.size(), equalTo(1)); - MappingMetadata mapping = indexMappings.get("type1"); - assertThat(mapping, notNullValue()); - assertThat(mapping.type(), equalTo("type1")); } private void assertEmptyOrOnlyDefaultMappings(GetIndexResponse response, String indexName) { - ImmutableOpenMap> mappings = response.mappings(); + ImmutableOpenMap mappings = response.mappings(); assertThat(mappings, notNullValue()); assertThat(mappings.size(), equalTo(1)); - ImmutableOpenMap indexMappings = mappings.get(indexName); - assertThat(indexMappings, notNullValue()); - assertThat(indexMappings.size(), equalTo(0)); + MappingMetadata indexMappings = mappings.get(indexName); + assertEquals(indexMappings, MappingMetadata.EMPTY_MAPPINGS); } private void assertAliases(GetIndexResponse response, String indexName) { diff --git 
a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/mapping/put/ValidateMappingRequestPluginIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/mapping/put/ValidateMappingRequestPluginIT.java index 3f67495fd746c..fe1bc05dc5f20 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/mapping/put/ValidateMappingRequestPluginIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/mapping/put/ValidateMappingRequestPluginIT.java @@ -80,40 +80,38 @@ public void testValidateMappingRequest() { allowedOrigins.put("index_2", Arrays.asList("2", "3")); { String origin = randomFrom("", "3", "4", "5"); - PutMappingRequest request = new PutMappingRequest().indices("index_1").type("doc").source("t1", "type=keyword").origin(origin); + PutMappingRequest request = new PutMappingRequest().indices("index_1").source("t1", "type=keyword").origin(origin); Exception e = expectThrows(IllegalStateException.class, () -> client().admin().indices().putMapping(request).actionGet()); assertThat(e.getMessage(), equalTo("not allowed: index[index_1] origin[" + origin + "]")); } { PutMappingRequest request = new PutMappingRequest().indices("index_1") .origin(randomFrom("1", "2")) - .type("doc") .source("t1", "type=keyword"); assertAcked(client().admin().indices().putMapping(request).actionGet()); } { String origin = randomFrom("", "1", "4", "5"); - PutMappingRequest request = new PutMappingRequest().indices("index_2").type("doc").source("t2", "type=keyword").origin(origin); + PutMappingRequest request = new PutMappingRequest().indices("index_2").source("t2", "type=keyword").origin(origin); Exception e = expectThrows(IllegalStateException.class, () -> client().admin().indices().putMapping(request).actionGet()); assertThat(e.getMessage(), equalTo("not allowed: index[index_2] origin[" + origin + "]")); } { PutMappingRequest request = new PutMappingRequest().indices("index_2") .origin(randomFrom("2", "3")) - .type("doc") .source("t1", "type=keyword"); assertAcked(client().admin().indices().putMapping(request).actionGet()); } { String origin = randomFrom("", "1", "3", "4"); - PutMappingRequest request = new PutMappingRequest().indices("*").type("doc").source("t3", "type=keyword").origin(origin); + PutMappingRequest request = new PutMappingRequest().indices("*").source("t3", "type=keyword").origin(origin); Exception e = expectThrows(IllegalStateException.class, () -> client().admin().indices().putMapping(request).actionGet()); assertThat(e.getMessage(), containsString("not allowed:")); } { - PutMappingRequest request = new PutMappingRequest().indices("index_2").origin("2").type("doc").source("t3", "type=keyword"); + PutMappingRequest request = new PutMappingRequest().indices("index_2").origin("2").source("t3", "type=keyword"); assertAcked(client().admin().indices().putMapping(request).actionGet()); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java index 992a4fcb8eab7..df885848d82ce 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java @@ -53,7 +53,7 @@ public void testIndicesSegmentsWithBlocks() { int docs = between(10, 100); for (int i = 0; i < docs; i++) 
{ - client().prepareIndex("test-blocks", "type", "" + i).setSource("test", "init").execute().actionGet(); + client().prepareIndex("test-blocks").setId("" + i).setSource("test", "init").execute().actionGet(); } client().admin().indices().prepareFlush("test-blocks").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index 714b366fdda8c..ea9f7e0a7232d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -236,7 +236,7 @@ private void indexRandomData(String index) throws ExecutionException, Interrupte int numDocs = scaledRandomIntBetween(10, 20); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(index, "type").setSource("field", "value"); + builders[i] = client().prepareIndex(index).setSource("field", "value"); } indexRandom(true, builders); client().admin().indices().prepareFlush().setForce(true).execute().actionGet(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java index ab934170b594a..e2a1363f163da 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java @@ -128,10 +128,10 @@ public void testBulkWithWriteIndexAndRouting() { assertThat(bulkResponse.getItems()[0].getResponse().status(), equalTo(RestStatus.CREATED)); assertThat(client().prepareGet("index3", "id").setRouting("1").get().getSource().get("foo"), equalTo("baz")); - bulkResponse = client().prepareBulk().add(client().prepareUpdate("alias1", "type", "id").setDoc("foo", "updated")).get(); + bulkResponse = client().prepareBulk().add(client().prepareUpdate("alias1", "id").setDoc("foo", "updated")).get(); assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures()); assertThat(client().prepareGet("index3", "id").setRouting("1").get().getSource().get("foo"), equalTo("updated")); - bulkResponse = client().prepareBulk().add(client().prepareDelete("alias1", "type", "id")).get(); + bulkResponse = client().prepareBulk().add(client().prepareDelete("alias1", "id")).get(); assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures()); assertFalse(client().prepareGet("index3", "id").setRouting("1").get().isExists()); } @@ -200,7 +200,7 @@ public void testDeleteIndexWhileIndexing() throws Exception { while (stopped.get() == false && docID.get() < 5000) { String id = Integer.toString(docID.incrementAndGet()); try { - IndexResponse response = client().prepareIndex(index, "_doc") + IndexResponse response = client().prepareIndex(index) .setId(id) .setSource(Collections.singletonMap("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorClusterSettingsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorClusterSettingsIT.java index 7532e5dc1067c..14531787e9903 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorClusterSettingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorClusterSettingsIT.java @@ -50,9 +50,9 @@ public void testBulkProcessorAutoCreateRestrictions() throws Exception { client().admin().cluster().prepareHealth("willwork").setWaitForGreenStatus().execute().actionGet(); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); - bulkRequestBuilder.add(client().prepareIndex("willwork", "type1", "1").setSource("{\"foo\":1}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("wontwork", "type1", "2").setSource("{\"foo\":2}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("willwork", "type1", "3").setSource("{\"foo\":3}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("willwork").setId("1").setSource("{\"foo\":1}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("wontwork").setId("2").setSource("{\"foo\":2}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("willwork").setId("3").setSource("{\"foo\":3}", XContentType.JSON)); BulkResponse br = bulkRequestBuilder.get(); BulkItemResponse[] responses = br.getItems(); assertEquals(3, responses.length); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java index 68cb46fd20e50..687a4e9b733fd 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java @@ -57,7 +57,6 @@ @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 2) public class BulkProcessorRetryIT extends OpenSearchIntegTestCase { private static final String INDEX_NAME = "test"; - private static final String TYPE_NAME = "type"; @Override protected Settings nodeSettings(int nodeOrdinal) { @@ -186,7 +185,6 @@ private static void indexDocs(BulkProcessor processor, int numDocs) { processor.add( client().prepareIndex() .setIndex(INDEX_NAME) - .setType(TYPE_NAME) .setId(Integer.toString(i)) .setSource("field", randomRealisticUnicodeOfLengthBetween(1, 30)) .request() diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java index f2b83fc92cc63..6311ac6876192 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java @@ -137,11 +137,11 @@ public void testBulkUpdateSimple() throws Exception { ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("1").setSource("field", 1)) - .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("2").setSource("field", 2).setCreate(true)) - .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("3").setSource("field", 3)) - .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("4").setSource("field", 4)) - .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("5").setSource("field", 5)) + .add(client().prepareIndex().setIndex(indexOrAlias()).setId("1").setSource("field", 1)) + 
.add(client().prepareIndex().setIndex(indexOrAlias()).setId("2").setSource("field", 2).setCreate(true)) + .add(client().prepareIndex().setIndex(indexOrAlias()).setId("3").setSource("field", 3)) + .add(client().prepareIndex().setIndex(indexOrAlias()).setId("4").setSource("field", 4)) + .add(client().prepareIndex().setIndex(indexOrAlias()).setId("5").setSource("field", 5)) .execute() .actionGet(); @@ -154,12 +154,11 @@ public void testBulkUpdateSimple() throws Exception { final Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx._source.field += 1", Collections.emptyMap()); bulkResponse = client().prepareBulk() - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("1").setScript(script)) - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("2").setScript(script).setRetryOnConflict(3)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setId("1").setScript(script)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setId("2").setScript(script).setRetryOnConflict(3)) .add( client().prepareUpdate() .setIndex(indexOrAlias()) - .setType("type1") .setId("3") .setDoc(jsonBuilder().startObject().field("field1", "test").endObject()) ) @@ -196,13 +195,12 @@ public void testBulkUpdateSimple() throws Exception { .add( client().prepareUpdate() .setIndex(indexOrAlias()) - .setType("type1") .setId("6") .setScript(script) .setUpsert(jsonBuilder().startObject().field("field", 0).endObject()) ) - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("7").setScript(script)) - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("2").setScript(script)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setId("7").setScript(script)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setId("2").setScript(script)) .get(); assertThat(bulkResponse.hasFailures(), equalTo(true)); @@ -273,9 +271,9 @@ public void testBulkWithCAS() throws Exception { createIndex("test", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).build()); ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex("test", "type", "1").setCreate(true).setSource("field", "1")) - .add(client().prepareIndex("test", "type", "2").setCreate(true).setSource("field", "1")) - .add(client().prepareIndex("test", "type", "1").setSource("field", "2")) + .add(client().prepareIndex("test").setId("1").setCreate(true).setSource("field", "1")) + .add(client().prepareIndex("test").setId("2").setCreate(true).setSource("field", "1")) + .add(client().prepareIndex("test").setId("1").setSource("field", "2")) .get(); assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[0].getResponse().getResult()); @@ -286,19 +284,9 @@ public void testBulkWithCAS() throws Exception { assertThat(bulkResponse.getItems()[2].getResponse().getSeqNo(), equalTo(2L)); bulkResponse = client().prepareBulk() - .add( - client().prepareUpdate("test", "type", "1") - .setIfSeqNo(40L) - .setIfPrimaryTerm(20) - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2") - ) - .add(client().prepareUpdate("test", "type", "2").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2")) - .add( - client().prepareUpdate("test", "type", "1") - .setIfSeqNo(2L) - .setIfPrimaryTerm(1) - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "3") - ) + .add(client().prepareUpdate("test", "1").setIfSeqNo(40L).setIfPrimaryTerm(20).setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2")) + .add(client().prepareUpdate("test", 
"2").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2")) + .add(client().prepareUpdate("test", "1").setIfSeqNo(2L).setIfPrimaryTerm(1).setDoc(Requests.INDEX_CONTENT_TYPE, "field", "3")) .get(); assertThat(bulkResponse.getItems()[0].getFailureMessage(), containsString("version conflict")); @@ -306,9 +294,9 @@ public void testBulkWithCAS() throws Exception { assertThat(bulkResponse.getItems()[2].getResponse().getSeqNo(), equalTo(4L)); bulkResponse = client().prepareBulk() - .add(client().prepareIndex("test", "type", "e1").setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) - .add(client().prepareIndex("test", "type", "e2").setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) - .add(client().prepareIndex("test", "type", "e1").setSource("field", "2").setVersion(12).setVersionType(VersionType.EXTERNAL)) + .add(client().prepareIndex("test").setId("e1").setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) + .add(client().prepareIndex("test").setId("e2").setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) + .add(client().prepareIndex("test").setId("e1").setSource("field", "2").setVersion(12).setVersionType(VersionType.EXTERNAL)) .get(); assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[0].getResponse().getResult()); @@ -319,18 +307,8 @@ public void testBulkWithCAS() throws Exception { assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(12L)); bulkResponse = client().prepareBulk() - .add( - client().prepareUpdate("test", "type", "e1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2") - .setIfSeqNo(10L) - .setIfPrimaryTerm(1) - ) - .add( - client().prepareUpdate("test", "type", "e1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "3") - .setIfSeqNo(20L) - .setIfPrimaryTerm(1) - ) + .add(client().prepareUpdate("test", "e1").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2").setIfSeqNo(10L).setIfPrimaryTerm(1)) + .add(client().prepareUpdate("test", "e1").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "3").setIfSeqNo(20L).setIfPrimaryTerm(1)) .get(); assertThat(bulkResponse.getItems()[0].getFailureMessage(), containsString("version conflict")); @@ -342,9 +320,9 @@ public void testBulkUpdateMalformedScripts() throws Exception { ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex().setIndex("test").setType("type1").setId("1").setSource("field", 1)) - .add(client().prepareIndex().setIndex("test").setType("type1").setId("2").setSource("field", 1)) - .add(client().prepareIndex().setIndex("test").setType("type1").setId("3").setSource("field", 1)) + .add(client().prepareIndex().setIndex("test").setId("1").setSource("field", 1)) + .add(client().prepareIndex().setIndex("test").setId("2").setSource("field", 1)) + .add(client().prepareIndex().setIndex("test").setId("3").setSource("field", 1)) .execute() .actionGet(); @@ -355,7 +333,6 @@ public void testBulkUpdateMalformedScripts() throws Exception { .add( client().prepareUpdate() .setIndex("test") - .setType("type1") .setId("1") .setFetchSource("field", null) .setScript( @@ -370,7 +347,6 @@ public void testBulkUpdateMalformedScripts() throws Exception { .add( client().prepareUpdate() .setIndex("test") - .setType("type1") .setId("2") .setFetchSource("field", null) .setScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx._source.field += 1", Collections.emptyMap())) @@ -378,7 +354,6 @@ public void testBulkUpdateMalformedScripts() throws Exception { .add( 
client().prepareUpdate() .setIndex("test") - .setType("type1") .setId("3") .setFetchSource("field", null) .setScript( @@ -425,7 +400,6 @@ public void testBulkUpdateLargerVolume() throws Exception { builder.add( client().prepareUpdate() .setIndex("test") - .setType("type1") .setId(Integer.toString(i)) .setFetchSource("counter", null) .setScript(script) @@ -457,7 +431,6 @@ public void testBulkUpdateLargerVolume() throws Exception { for (int i = 0; i < numDocs; i++) { UpdateRequestBuilder updateBuilder = client().prepareUpdate() .setIndex("test") - .setType("type1") .setId(Integer.toString(i)) .setFetchSource("counter", null); if (i % 2 == 0) { @@ -488,7 +461,7 @@ public void testBulkUpdateLargerVolume() throws Exception { builder = client().prepareBulk(); int maxDocs = numDocs / 2 + numDocs; for (int i = (numDocs / 2); i < maxDocs; i++) { - builder.add(client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i)).setScript(script)); + builder.add(client().prepareUpdate().setIndex("test").setId(Integer.toString(i)).setScript(script)); } response = builder.execute().actionGet(); assertThat(response.hasFailures(), equalTo(true)); @@ -511,7 +484,6 @@ public void testBulkUpdateLargerVolume() throws Exception { builder.add( client().prepareUpdate() .setIndex("test") - .setType("type1") .setId(Integer.toString(i)) .setScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx.op = \"none\"", Collections.emptyMap())) ); @@ -531,7 +503,6 @@ public void testBulkUpdateLargerVolume() throws Exception { builder.add( client().prepareUpdate() .setIndex("test") - .setType("type1") .setId(Integer.toString(i)) .setScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx.op = \"delete\"", Collections.emptyMap())) ); @@ -567,7 +538,7 @@ public void testBulkIndexingWhileInitializing() throws Exception { for (int i = 0; i < numDocs;) { final BulkRequestBuilder builder = client().prepareBulk(); for (int j = 0; j < bulk && i < numDocs; j++, i++) { - builder.add(client().prepareIndex("test", "type1", Integer.toString(i)).setSource("val", i)); + builder.add(client().prepareIndex("test").setId(Integer.toString(i)).setSource("val", i)); } logger.info("bulk indexing {}-{}", i - bulk, i - 1); BulkResponse response = builder.get(); @@ -599,7 +570,7 @@ public void testFailingVersionedUpdatedOnBulk() throws Exception { } BulkRequestBuilder requestBuilder = client().prepareBulk(); requestBuilder.add( - client().prepareUpdate("test", "type", "1") + client().prepareUpdate("test", "1") .setIfSeqNo(0L) .setIfPrimaryTerm(1) .setDoc(Requests.INDEX_CONTENT_TYPE, "field", threadID) @@ -643,7 +614,7 @@ public void testThatInvalidIndexNamesShouldNotBreakCompleteBulkRequest() { } else { name = "test"; } - builder.add(client().prepareIndex().setIndex(name).setType("type1").setId("1").setSource("field", 1)); + builder.add(client().prepareIndex().setIndex(name).setId("1").setSource("field", 1)); } BulkResponse bulkResponse = builder.get(); assertThat(bulkResponse.hasFailures(), is(expectFailure)); @@ -721,7 +692,7 @@ public void testThatMissingIndexDoesNotAbortFullBulkRequest() throws Exception { public void testFailedRequestsOnClosedIndex() throws Exception { createIndex("bulkindex1"); - client().prepareIndex("bulkindex1", "index1_type", "1").setSource("text", "test").get(); + client().prepareIndex("bulkindex1").setId("1").setSource("text", "test").get(); assertBusy(() -> assertAcked(client().admin().indices().prepareClose("bulkindex1"))); BulkRequest bulkRequest = new 
BulkRequest().setRefreshPolicy(RefreshPolicy.IMMEDIATE); @@ -744,21 +715,9 @@ public void testFailedRequestsOnClosedIndex() throws Exception { // issue 9821 public void testInvalidIndexNamesCorrectOpType() { BulkResponse bulkResponse = client().prepareBulk() - .add( - client().prepareIndex() - .setIndex("INVALID.NAME") - .setType("type1") - .setId("1") - .setSource(Requests.INDEX_CONTENT_TYPE, "field", 1) - ) - .add( - client().prepareUpdate() - .setIndex("INVALID.NAME") - .setType("type1") - .setId("1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", randomInt()) - ) - .add(client().prepareDelete().setIndex("INVALID.NAME").setType("type1").setId("1")) + .add(client().prepareIndex().setIndex("INVALID.NAME").setId("1").setSource(Requests.INDEX_CONTENT_TYPE, "field", 1)) + .add(client().prepareUpdate().setIndex("INVALID.NAME").setId("1").setDoc(Requests.INDEX_CONTENT_TYPE, "field", randomInt())) + .add(client().prepareDelete().setIndex("INVALID.NAME").setId("1")) .get(); assertThat(bulkResponse.getItems().length, is(3)); assertThat(bulkResponse.getItems()[0].getOpType(), is(OpType.INDEX)); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/search/SearchProgressActionListenerIT.java b/server/src/internalClusterTest/java/org/opensearch/action/search/SearchProgressActionListenerIT.java index 1309bf74bf809..eb69eaaa9c2e1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/search/SearchProgressActionListenerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/search/SearchProgressActionListenerIT.java @@ -213,7 +213,7 @@ private static List createRandomIndices(Client client) { for (int i = 0; i < numIndices; i++) { String indexName = String.format(Locale.ROOT, "index-%03d", i); assertAcked(client.admin().indices().prepareCreate(indexName).get()); - client.prepareIndex(indexName, "doc", Integer.toString(i)).setSource("number", i, "foo", "bar").get(); + client.prepareIndex(indexName).setId(Integer.toString(i)).setSource("number", i, "foo", "bar").get(); } client.admin().indices().prepareRefresh("index-*").get(); ClusterSearchShardsResponse resp = client.admin().cluster().prepareSearchShards("index-*").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java index 9ed76d6fe8f99..c7985d972de5e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java @@ -358,9 +358,9 @@ public void testSearchIdle() throws Exception { } } }); - client().prepareIndex("test", "_doc").setId("1").setSource("created_date", "2020-01-01").get(); - client().prepareIndex("test", "_doc").setId("2").setSource("created_date", "2020-01-02").get(); - client().prepareIndex("test", "_doc").setId("3").setSource("created_date", "2020-01-03").get(); + client().prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); + client().prepareIndex("test").setId("2").setSource("created_date", "2020-01-02").get(); + client().prepareIndex("test").setId("3").setSource("created_date", "2020-01-03").get(); assertBusy(() -> { SearchResponse resp = client().prepareSearch("test") .setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) @@ -491,7 +491,7 @@ private void indexSomeDocs(String indexName, int numberOfShards, int numberOfDoc createIndex(indexName, 
Settings.builder().put("index.number_of_shards", numberOfShards).build()); for (int i = 0; i < numberOfDocs; i++) { - IndexResponse indexResponse = client().prepareIndex(indexName, "_doc").setSource("number", randomInt()).get(); + IndexResponse indexResponse = client().prepareIndex(indexName).setSource("number", randomInt()).get(); assertEquals(RestStatus.CREATED, indexResponse.status()); } client().admin().indices().prepareRefresh(indexName).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/support/WaitActiveShardCountIT.java b/server/src/internalClusterTest/java/org/opensearch/action/support/WaitActiveShardCountIT.java index 9e93235c29729..e919b2b85e079 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/support/WaitActiveShardCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/support/WaitActiveShardCountIT.java @@ -63,9 +63,10 @@ public void testReplicationWaitsForActiveShardCount() throws Exception { assertAcked(createIndexResponse); // indexing, by default, will work (waiting for one shard copy only) - client().prepareIndex("test", "type1", "1").setSource(source("1", "test"), XContentType.JSON).execute().actionGet(); + client().prepareIndex("test").setId("1").setSource(source("1", "test"), XContentType.JSON).execute().actionGet(); try { - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(source("1", "test"), XContentType.JSON) .setWaitForActiveShards(2) // wait for 2 active shard copies .setTimeout(timeValueMillis(100)) @@ -96,7 +97,8 @@ public void testReplicationWaitsForActiveShardCount() throws Exception { assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); // this should work, since we now have two - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(source("1", "test"), XContentType.JSON) .setWaitForActiveShards(2) .setTimeout(timeValueSeconds(1)) @@ -104,7 +106,8 @@ public void testReplicationWaitsForActiveShardCount() throws Exception { .actionGet(); try { - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(source("1", "test"), XContentType.JSON) .setWaitForActiveShards(ActiveShardCount.ALL) .setTimeout(timeValueMillis(100)) @@ -138,7 +141,8 @@ public void testReplicationWaitsForActiveShardCount() throws Exception { assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); // this should work, since we now have all shards started - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(source("1", "test"), XContentType.JSON) .setWaitForActiveShards(ActiveShardCount.ALL) .setTimeout(timeValueSeconds(1)) diff --git a/server/src/internalClusterTest/java/org/opensearch/action/support/master/IndexingMasterFailoverIT.java b/server/src/internalClusterTest/java/org/opensearch/action/support/master/IndexingMasterFailoverIT.java index 2aa29304577b4..f8db63bc8b61d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/support/master/IndexingMasterFailoverIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/support/master/IndexingMasterFailoverIT.java @@ -97,8 +97,8 @@ public void run() { return; } for (int i = 0; i < 10; i++) { - // index data with mapping changes - IndexResponse response = client(dataNode).prepareIndex("myindex", "mytype").setSource("field_" + i, "val").get(); + // index data + IndexResponse response = 
client(dataNode).prepareIndex("myindex").setSource("field_" + i, "val").get(); assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java index 52333061f3e6b..d7017122d221c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java @@ -92,7 +92,7 @@ public void testNoSuchDoc() throws Exception { .endObject(); assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); - client().prepareIndex("test", "type1", "666").setSource("field", "foo bar").execute().actionGet(); + client().prepareIndex("test").setId("667").setSource("field", "foo bar").execute().actionGet(); refresh(); for (int i = 0; i < 20; i++) { ActionFuture termVector = client().termVectors(new TermVectorsRequest(indexOrAlias(), "" + i)); @@ -119,7 +119,7 @@ public void testExistingFieldWithNoTermVectorsNoNPE() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); // when indexing a field that simply has a question mark, the term vectors will be null - client().prepareIndex("test", "type1", "0").setSource("existingfield", "?").execute().actionGet(); + client().prepareIndex("test").setId("0").setSource("existingfield", "?").execute().actionGet(); refresh(); ActionFuture termVector = client().termVectors( new TermVectorsRequest(indexOrAlias(), "0").selectedFields(new String[] { "existingfield" }) @@ -147,7 +147,7 @@ public void testExistingFieldButNotInDocNPE() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); // when indexing a field that simply has a question mark, the term vectors will be null - client().prepareIndex("test", "type1", "0").setSource("anotherexistingfield", 1).execute().actionGet(); + client().prepareIndex("test").setId("0").setSource("anotherexistingfield", 1).execute().actionGet(); refresh(); ActionFuture termVectors = client().termVectors( new TermVectorsRequest(indexOrAlias(), "0").selectedFields(randomBoolean() ? 
new String[] { "existingfield" } : null) @@ -186,7 +186,7 @@ public void testNotIndexedField() throws Exception { List indexBuilders = new ArrayList<>(); for (int i = 0; i < 6; i++) { - indexBuilders.add(client().prepareIndex().setIndex("test").setType("type1").setId(String.valueOf(i)).setSource("field" + i, i)); + indexBuilders.add(client().prepareIndex().setIndex("test").setId(String.valueOf(i)).setSource("field" + i, i)); } indexRandom(true, indexBuilders); @@ -228,7 +228,8 @@ public void testSimpleTermVectors() throws IOException { ) ); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("field", "the quick brown fox jumps over the lazy dog") @@ -336,7 +337,8 @@ public void testRandomSingleTermVectors() throws IOException { ) ); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "_doc", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("field", "the quick brown fox jumps over the lazy dog") @@ -492,7 +494,7 @@ public void testSimpleTermVectorsWithGenerate() throws IOException { ensureGreen(); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource(source).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource(source).execute().actionGet(); refresh(); } @@ -575,9 +577,7 @@ public void testDuelWithAndWithoutTermVectors() throws IOException, ExecutionExc List indexBuilders = new ArrayList<>(); for (String indexName : indexNames) { for (int id = 0; id < content.length; id++) { - indexBuilders.add( - client().prepareIndex().setIndex(indexName).setType("type1").setId(String.valueOf(id)).setSource("field1", content[id]) - ); + indexBuilders.add(client().prepareIndex().setIndex(indexName).setId(String.valueOf(id)).setSource("field1", content[id])); } } indexRandom(true, indexBuilders); @@ -654,7 +654,7 @@ public void testSimpleWildCards() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); ensureGreen(); - client().prepareIndex("test", "type1", "0").setSource(source).get(); + client().prepareIndex("test").setId("0").setSource(source).get(); refresh(); TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "0").setSelectedFields("field*").get(); @@ -680,9 +680,7 @@ public void testArtificialVsExisting() throws ExecutionException, InterruptedExc List indexBuilders = new ArrayList<>(); for (int i = 0; i < content.length; i++) { - indexBuilders.add( - client().prepareIndex().setIndex("test").setType("type1").setId(String.valueOf(i)).setSource("field1", content[i]) - ); + indexBuilders.add(client().prepareIndex().setIndex("test").setId(String.valueOf(i)).setSource("field1", content[i])); } indexRandom(true, indexBuilders); @@ -770,7 +768,7 @@ public void testPerFieldAnalyzer() throws IOException { ensureGreen(); // index a single document with prepared source - client().prepareIndex("test", "type1", "0").setSource(source).get(); + client().prepareIndex("test").setId("0").setSource(source).get(); refresh(); // create random per_field_analyzer and selected fields @@ -844,7 +842,7 @@ public void testTermVectorsWithVersion() { assertThat(response.isExists(), equalTo(false)); logger.info("--> index doc 1"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", 
"value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); // From translog: @@ -890,7 +888,7 @@ public void testTermVectorsWithVersion() { } logger.info("--> index doc 1 again, so increasing the version"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); // From translog: @@ -953,7 +951,7 @@ public void testFilterLength() throws ExecutionException, InterruptedException, } tags.add(tag); } - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("tags", tags)); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("tags", tags)); logger.info("Checking best tags by longest to shortest size ..."); TermVectorsRequest.FilterSettings filterSettings = new TermVectorsRequest.FilterSettings(); @@ -989,7 +987,7 @@ public void testFilterTermFreq() throws ExecutionException, InterruptedException } uniqueTags.add(tag); } - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("tags", tags)); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("tags", tags)); logger.info("Checking best tags by highest to lowest term freq ..."); TermVectorsRequest.FilterSettings filterSettings = new TermVectorsRequest.FilterSettings(); @@ -1020,7 +1018,7 @@ public void testFilterDocFreq() throws ExecutionException, InterruptedException, List tags = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { tags.add("tag_" + i); - builders.add(client().prepareIndex("test", "type1", i + "").setSource("tags", tags)); + builders.add(client().prepareIndex("test").setId(i + "").setSource("tags", tags)); } indexRandom(true, builders); @@ -1048,7 +1046,7 @@ public void testArtificialDocWithPreference() throws InterruptedException, IOExc ensureGreen(); // index document - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "random permutation")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("field1", "random permutation")); // Get search shards ClusterSearchShardsResponse searchShardsResponse = client().admin().cluster().prepareSearchShards("test").get(); @@ -1111,7 +1109,6 @@ public void testWithKeywordAndNormalizer() throws IOException, ExecutionExceptio indexBuilders.add( client().prepareIndex() .setIndex(indexName) - .setType("type1") .setId(String.valueOf(id)) .setSource("field1", content[id], "field2", content[id]) ); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java index 1228ec85c2b08..91d280a9c4771 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java @@ -101,7 +101,7 @@ public void testMultiTermVectorsWithVersion() throws Exception { assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false)); for (int i = 0; i < 3; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } // Version from translog @@ -150,7 +150,7 @@ public void testMultiTermVectorsWithVersion() throws Exception { 
assertThat(response.getResponses()[2].getFailure().getCause().getCause(), instanceOf(VersionConflictEngineException.class)); for (int i = 0; i < 3; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } // Version from translog diff --git a/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java index 541fe495ee8e8..2d01e4c031538 100644 --- a/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java @@ -470,9 +470,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { logger.info("--> creating indices"); createIndex("test1", "test2", "test3"); - assertAcked( - client().admin().indices().preparePutMapping("test1", "test2", "test3").setType("type1").setSource("name", "type=text") - ); + assertAcked(client().admin().indices().preparePutMapping("test1", "test2", "test3").setSource("name", "type=text")); ensureGreen(); @@ -862,11 +860,7 @@ public void testIndicesGetAliases() throws Exception { createIndex("bazbar"); assertAcked( - client().admin() - .indices() - .preparePutMapping("foobar", "test", "test123", "foobarbaz", "bazbar") - .setType("type") - .setSource("field", "type=text") + client().admin().indices().preparePutMapping("foobar", "test", "test123", "foobarbaz", "bazbar").setSource("field", "type=text") ); ensureGreen(); @@ -1206,7 +1200,7 @@ public void testAliasFilterWithNowInRangeFilterAndQuery() throws Exception { final int numDocs = scaledRandomIntBetween(5, 52); for (int i = 1; i <= numDocs; i++) { - client().prepareIndex("my-index", "my-type").setSource("timestamp", "2016-12-12").get(); + client().prepareIndex("my-index").setSource("timestamp", "2016-12-12").get(); if (i % 2 == 0) { refresh(); SearchResponse response = client().prepareSearch("filter1").get(); @@ -1305,7 +1299,7 @@ public void testAliasActionRemoveIndex() throws InterruptedException, ExecutionE public void testRemoveIndexAndReplaceWithAlias() throws InterruptedException, ExecutionException { assertAcked(client().admin().indices().prepareCreate("test")); - indexRandom(true, client().prepareIndex("test_2", "test", "test").setSource("test", "test")); + indexRandom(true, client().prepareIndex("test_2").setId("test").setSource("test", "test")); assertAliasesVersionIncreases( "test_2", () -> assertAcked(client().admin().indices().prepareAliases().addAlias("test_2", "test").removeIndex("test")) diff --git a/server/src/internalClusterTest/java/org/opensearch/blocks/SimpleBlocksIT.java b/server/src/internalClusterTest/java/org/opensearch/blocks/SimpleBlocksIT.java index 7bb8f0cc318af..8ede3e25b2e1a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/blocks/SimpleBlocksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/blocks/SimpleBlocksIT.java @@ -162,7 +162,7 @@ private void canNotCreateIndex(String index) { private void canIndexDocument(String index) { try { - IndexRequestBuilder builder = client().prepareIndex(index, "zzz"); + IndexRequestBuilder builder = client().prepareIndex(index); builder.setSource("foo", "bar"); IndexResponse r = builder.execute().actionGet(); assertThat(r, notNullValue()); @@ -173,7 +173,7 @@ private void canIndexDocument(String index) { private void canNotIndexDocument(String index) 
{ try { - IndexRequestBuilder builder = client().prepareIndex(index, "zzz"); + IndexRequestBuilder builder = client().prepareIndex(index); builder.setSource("foo", "bar"); builder.execute().actionGet(); fail(); @@ -306,7 +306,7 @@ public void testAddIndexBlock() throws Exception { false, randomBoolean(), IntStream.range(0, nbDocs) - .mapToObj(i -> client().prepareIndex(indexName, "zzz").setId(String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); @@ -332,7 +332,7 @@ public void testSameBlockTwice() throws Exception { false, randomBoolean(), IntStream.range(0, randomIntBetween(1, 10)) - .mapToObj(i -> client().prepareIndex(indexName, "zzz").setId(String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); } @@ -378,7 +378,7 @@ public void testConcurrentAddBlock() throws InterruptedException { false, randomBoolean(), IntStream.range(0, nbDocs) - .mapToObj(i -> client().prepareIndex(indexName, "zzz").setId(String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); ensureYellowAndNoInitializingShards(indexName); @@ -460,7 +460,7 @@ public void testAddBlockWhileDeletingIndices() throws Exception { false, randomBoolean(), IntStream.range(0, 10) - .mapToObj(n -> client().prepareIndex(indexName, "zzz").setId(String.valueOf(n)).setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setId(String.valueOf(n)).setSource("num", n)) .collect(toList()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/client/documentation/IndicesDocumentationIT.java b/server/src/internalClusterTest/java/org/opensearch/client/documentation/IndicesDocumentationIT.java deleted file mode 100644 index 6108e8ee8efe3..0000000000000 --- a/server/src/internalClusterTest/java/org/opensearch/client/documentation/IndicesDocumentationIT.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.client.documentation; - -import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.opensearch.client.Client; -import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.collect.ImmutableOpenMap; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.test.OpenSearchIntegTestCase; - -import static java.util.Collections.singletonMap; -import static org.hamcrest.Matchers.instanceOf; - -/** - * This class is used to generate the Java indices administration documentation. - * You need to wrap your code between two tags like: - * // tag::example[] - * // end::example[] - * - * Where example is your tag name. - * - * Then in the documentation, you can extract what is between tag and end tags - * with ["source","java",subs="attributes,callouts,macros"] - * -------------------------------------------------- - * include-tagged::{client-tests}/IndicesDocumentationIT.java[your-example-tag-here] - * -------------------------------------------------- - */ -public class IndicesDocumentationIT extends OpenSearchIntegTestCase { - - /** - * This test method is used to generate the Put Mapping Java Indices API documentation - * at "docs/java-api/admin/indices/put-mapping.asciidoc" so the documentation gets tested - * so that it compiles and runs without throwing errors at runtime. - */ - public void testPutMappingDocumentation() throws Exception { - Client client = client(); - - // tag::index-with-mapping - client.admin().indices().prepareCreate("twitter") // <1> - .addMapping("_doc", "message", "type=text") // <2> - .get(); - // end::index-with-mapping - GetMappingsResponse getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); - assertEquals(1, getMappingsResponse.getMappings().size()); - ImmutableOpenMap indexMapping = getMappingsResponse.getMappings().get("twitter"); - assertThat(indexMapping.get("_doc"), instanceOf(MappingMetadata.class)); - - // we need to delete in order to create a fresh new index with another type - client.admin().indices().prepareDelete("twitter").get(); - client.admin().indices().prepareCreate("twitter").get(); - - // tag::putMapping-request-source - client.admin().indices().preparePutMapping("twitter") // <1> - .setType("_doc") - .setSource("{\n" + - " \"properties\": {\n" + - " \"name\": {\n" + // <2> - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - "}", XContentType.JSON) - .get(); - - // You can also provide the type in the source document - client.admin().indices().preparePutMapping("twitter") - .setType("_doc") - .setSource("{\n" + - " \"_doc\":{\n" + // <3> - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}", XContentType.JSON) - .get(); - // end::putMapping-request-source - getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); - assertEquals(1, getMappingsResponse.getMappings().size()); - indexMapping = getMappingsResponse.getMappings().get("twitter"); - assertEquals( - singletonMap("properties", singletonMap("name", singletonMap("type", "text"))), - indexMapping.get("_doc").getSourceAsMap() - ); - } - -} diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/MinimumMasterNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/MinimumMasterNodesIT.java index 0374ef7d1b59b..c3dc686921eb6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/MinimumMasterNodesIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/cluster/MinimumMasterNodesIT.java @@ -121,7 +121,7 @@ public void testTwoNodesNoMasterBlock() throws Exception { NumShards numShards = getNumShards("test"); logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").execute().actionGet(); } // make sure that all shards recovered before trying to flush assertThat( @@ -286,7 +286,7 @@ public void testThreeNodesNoMasterBlock() throws Exception { NumShards numShards = getNumShards("test"); logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").execute().actionGet(); } ensureGreen(); // make sure that all shards recovered before trying to flush diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java index f8f686b27f29b..cef22343a1fea 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java @@ -165,7 +165,7 @@ public void testNoMasterActions() throws Exception { checkUpdateAction( false, timeout, - clientToMasterlessNode.prepareUpdate("test", "type1", "1") + clientToMasterlessNode.prepareUpdate("test", "1") .setScript(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", Collections.emptyMap())) .setTimeout(timeout) ); @@ -173,39 +173,41 @@ public void testNoMasterActions() throws Exception { checkUpdateAction( true, timeout, - clientToMasterlessNode.prepareUpdate("no_index", "type1", "1") + clientToMasterlessNode.prepareUpdate("no_index", "1") .setScript(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", Collections.emptyMap())) .setTimeout(timeout) ); checkWriteAction( - clientToMasterlessNode.prepareIndex("test", "type1", "1") + clientToMasterlessNode.prepareIndex("test") + .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) ); checkWriteAction( - clientToMasterlessNode.prepareIndex("no_index", "type1", "1") + clientToMasterlessNode.prepareIndex("no_index") + .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) ); BulkRequestBuilder bulkRequestBuilder = clientToMasterlessNode.prepareBulk(); bulkRequestBuilder.add( - clientToMasterlessNode.prepareIndex("test", "type1", "1").setSource(XContentFactory.jsonBuilder().startObject().endObject()) + clientToMasterlessNode.prepareIndex("test").setId("1").setSource(XContentFactory.jsonBuilder().startObject().endObject()) ); bulkRequestBuilder.add( - clientToMasterlessNode.prepareIndex("test", "type1", "2").setSource(XContentFactory.jsonBuilder().startObject().endObject()) + clientToMasterlessNode.prepareIndex("test").setId("2").setSource(XContentFactory.jsonBuilder().startObject().endObject()) ); bulkRequestBuilder.setTimeout(timeout); checkWriteAction(bulkRequestBuilder); bulkRequestBuilder = clientToMasterlessNode.prepareBulk(); bulkRequestBuilder.add( - clientToMasterlessNode.prepareIndex("no_index", "type1", 
"1").setSource(XContentFactory.jsonBuilder().startObject().endObject()) + clientToMasterlessNode.prepareIndex("no_index").setId("1").setSource(XContentFactory.jsonBuilder().startObject().endObject()) ); bulkRequestBuilder.add( - clientToMasterlessNode.prepareIndex("no_index", "type1", "2").setSource(XContentFactory.jsonBuilder().startObject().endObject()) + clientToMasterlessNode.prepareIndex("no_index").setId("2").setSource(XContentFactory.jsonBuilder().startObject().endObject()) ); bulkRequestBuilder.setTimeout(timeout); checkWriteAction(bulkRequestBuilder); @@ -252,8 +254,8 @@ public void testNoMasterActionsWriteMasterBlock() throws Exception { Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 3).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ).get(); client().admin().cluster().prepareHealth("_all").setWaitForGreenStatus().get(); - client().prepareIndex("test1", "type1", "1").setSource("field", "value1").get(); - client().prepareIndex("test2", "type1", "1").setSource("field", "value1").get(); + client().prepareIndex("test1").setId("1").setSource("field", "value1").get(); + client().prepareIndex("test2").setId("1").setSource("field", "value1").get(); refresh(); ensureSearchable("test1", "test2"); @@ -292,7 +294,7 @@ public void testNoMasterActionsWriteMasterBlock() throws Exception { TimeValue timeout = TimeValue.timeValueMillis(200); long now = System.currentTimeMillis(); try { - clientToMasterlessNode.prepareUpdate("test1", "type1", "1") + clientToMasterlessNode.prepareUpdate("test1", "1") .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") .setTimeout(timeout) .get(); @@ -306,7 +308,8 @@ public void testNoMasterActionsWriteMasterBlock() throws Exception { } try { - clientToMasterlessNode.prepareIndex("test1", "type1", "1") + clientToMasterlessNode.prepareIndex("test1") + .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) .get(); @@ -330,7 +333,7 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) ).get(); client().admin().cluster().prepareHealth("_all").setWaitForGreenStatus().get(); - client().prepareIndex("test1", "type1").setId("1").setSource("field", "value1").get(); + client().prepareIndex("test1").setId("1").setSource("field", "value1").get(); refresh(); ensureGreen("test1"); @@ -388,20 +391,20 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { ); TimeValue timeout = TimeValue.timeValueMillis(200); - client(randomFrom(nodesWithShards)).prepareUpdate("test1", "type1", "1") + client(randomFrom(nodesWithShards)).prepareUpdate("test1", "1") .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") .setTimeout(timeout) .get(); expectThrows( Exception.class, - () -> client(partitionedNode).prepareUpdate("test1", "type1", "1") + () -> client(partitionedNode).prepareUpdate("test1", "1") .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") .setTimeout(timeout) .get() ); - client(randomFrom(nodesWithShards)).prepareIndex("test1", "type1") + client(randomFrom(nodesWithShards)).prepareIndex("test1") .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) @@ -410,7 +413,7 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { // dynamic mapping updates fail expectThrows( MasterNotDiscoveredException.class, - () -> client(randomFrom(nodesWithShards)).prepareIndex("test1", "type1") + () 
-> client(randomFrom(nodesWithShards)).prepareIndex("test1") .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().field("new_field", "value").endObject()) .setTimeout(timeout) @@ -420,7 +423,7 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { // dynamic index creation fails expectThrows( MasterNotDiscoveredException.class, - () -> client(randomFrom(nodesWithShards)).prepareIndex("test2", "type1") + () -> client(randomFrom(nodesWithShards)).prepareIndex("test2") .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) @@ -429,7 +432,7 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { expectThrows( Exception.class, - () -> client(partitionedNode).prepareIndex("test1", "type1") + () -> client(partitionedNode).prepareIndex("test1") .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java index 931a67655a92f..93a903e0b5e0c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java @@ -292,24 +292,15 @@ public void testLargeClusterStatePublishing() throws Exception { .get() ); ensureGreen(); // wait for green state, so its both green, and there are no more pending events - MappingMetadata masterMappingMetadata = client().admin() - .indices() - .prepareGetMappings("test") - .setTypes("type") - .get() - .getMappings() - .get("test") - .get("type"); + MappingMetadata masterMappingMetadata = client().admin().indices().prepareGetMappings("test").get().getMappings().get("test"); for (Client client : clients()) { MappingMetadata mappingMetadata = client.admin() .indices() .prepareGetMappings("test") - .setTypes("type") .setLocal(true) .get() .getMappings() - .get("test") - .get("type"); + .get("test"); assertThat(mappingMetadata.source().string(), equalTo(masterMappingMetadata.source().string())); assertThat(mappingMetadata, equalTo(masterMappingMetadata)); } diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java index 1c5ff5deada1d..cdf853c2ad9ae 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java @@ -335,7 +335,7 @@ private void rerouteWithAllocateLocalGateway(Settings commonSettings) throws Exc ); if (closed == false) { - client().prepareIndex("test", "type", "1").setSource("field", "value").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field", "value").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); } final Index index = resolveIndex("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/FilteringAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/FilteringAllocationIT.java index 6e3ffc79da580..398adbd0d1ca5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/FilteringAllocationIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/FilteringAllocationIT.java @@ -72,7 +72,7 @@ public void testDecommissionNodeNoReplicas() { ensureGreen("test"); logger.info("--> index some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); } client().admin().indices().prepareRefresh().execute().actionGet(); assertThat( @@ -187,7 +187,7 @@ public void testDisablingAllocationFiltering() { logger.info("--> index some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); } client().admin().indices().prepareRefresh().execute().actionGet(); assertThat( diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java index 233dca2dabb28..9e3a693d9bdc4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java @@ -53,7 +53,6 @@ import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.allocation.AllocationService; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.discovery.Discovery; @@ -177,14 +176,16 @@ public void testDeleteCreateInOneBulk() throws Exception { internalCluster().startMasterOnlyNode(); String dataNode = internalCluster().startDataOnlyNode(); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut()); - prepareCreate("test").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)).addMapping("type").get(); + prepareCreate("test").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) + .addMapping(MapperService.SINGLE_MAPPING_NAME) + .get(); ensureGreen("test"); // block none master node. BlockClusterStateProcessing disruption = new BlockClusterStateProcessing(dataNode, random()); internalCluster().setDisruptionScheme(disruption); logger.info("--> indexing a doc"); - index("test", "type", "1"); + index("test", MapperService.SINGLE_MAPPING_NAME, "1"); refresh(); disruption.startDisrupting(); logger.info("--> delete index and recreate it"); @@ -264,19 +265,12 @@ public void testDelayedMappingPropagationOnPrimary() throws Exception { // Add a new mapping... 
ActionFuture putMappingResponse = executeAndCancelCommittedPublication( - client().admin().indices().preparePutMapping("index").setType("type").setSource("field", "type=long") + client().admin().indices().preparePutMapping("index").setSource("field", "type=long") ); // ...and wait for mappings to be available on master assertBusy(() -> { - ImmutableOpenMap indexMappings = client().admin() - .indices() - .prepareGetMappings("index") - .get() - .getMappings() - .get("index"); - assertNotNull(indexMappings); - MappingMetadata typeMappings = indexMappings.get("type"); + MappingMetadata typeMappings = client().admin().indices().prepareGetMappings("index").get().getMappings().get("index"); assertNotNull(typeMappings); Object properties; try { @@ -291,7 +285,7 @@ public void testDelayedMappingPropagationOnPrimary() throws Exception { // this request does not change the cluster state, because mapping is already created, // we don't await and cancel committed publication - ActionFuture docIndexResponse = client().prepareIndex("index", "type", "1").setSource("field", 42).execute(); + ActionFuture docIndexResponse = client().prepareIndex("index").setId("1").setSource("field", 42).execute(); // Wait a bit to make sure that the reason why we did not get a response // is that cluster state processing is blocked and not just that it takes @@ -361,7 +355,7 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { internalCluster().setDisruptionScheme(disruption); disruption.startDisrupting(); final ActionFuture putMappingResponse = executeAndCancelCommittedPublication( - client().admin().indices().preparePutMapping("index").setType("type").setSource("field", "type=long") + client().admin().indices().preparePutMapping("index").setSource("field", "type=long") ); final Index index = resolveIndex("index"); @@ -371,12 +365,12 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { final IndexService indexService = indicesService.indexServiceSafe(index); assertNotNull(indexService); final MapperService mapperService = indexService.mapperService(); - DocumentMapper mapper = mapperService.documentMapper("type"); + DocumentMapper mapper = mapperService.documentMapper(MapperService.SINGLE_MAPPING_NAME); assertNotNull(mapper); assertNotNull(mapper.mappers().getMapper("field")); }); - final ActionFuture docIndexResponse = client().prepareIndex("index", "type", "1").setSource("field", 42).execute(); + final ActionFuture docIndexResponse = client().prepareIndex("index").setId("1").setSource("field", 42).execute(); assertBusy(() -> assertTrue(client().prepareGet("index", "1").get().isExists())); @@ -386,7 +380,7 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { // this request does not change the cluster state, because the mapping is dynamic, // we need to await and cancel committed publication ActionFuture dynamicMappingsFut = executeAndCancelCommittedPublication( - client().prepareIndex("index", "type", "2").setSource("field2", 42) + client().prepareIndex("index").setId("2").setSource("field2", 42) ); // ...and wait for second mapping to be available on master @@ -395,7 +389,7 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { final IndexService indexService = indicesService.indexServiceSafe(index); assertNotNull(indexService); final MapperService mapperService = indexService.mapperService(); - DocumentMapper mapper = mapperService.documentMapper("type"); + DocumentMapper mapper = 
mapperService.documentMapper(MapperService.SINGLE_MAPPING_NAME); assertNotNull(mapper); assertNotNull(mapper.mappers().getMapper("field2")); }); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java index ea5bb145cfd75..1447379b93ec8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java @@ -430,7 +430,7 @@ public void testAllMasterEligibleNodesFailedDanglingIndexImport() throws Excepti ensureStableCluster(2); logger.info("--> index 1 doc and ensure index is green"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); ensureGreen("test"); assertBusy( () -> internalCluster().getInstances(IndicesService.class) diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/AllocationIdIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/AllocationIdIT.java index 2dad58550228e..a20e944caebb2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/AllocationIdIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/AllocationIdIT.java @@ -193,7 +193,7 @@ private int indexDocs(String indexName, Object... source) throws InterruptedExce final int numExtraDocs = between(10, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numExtraDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource(source); + builders[i] = client().prepareIndex(indexName).setSource(source); } indexRandom(true, false, true, Arrays.asList(builders)); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/DelayedAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/DelayedAllocationIT.java index bf19444db9159..b4b08a4c9ad73 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/DelayedAllocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/DelayedAllocationIT.java @@ -205,7 +205,7 @@ private void indexRandomData() throws Exception { int numDocs = scaledRandomIntBetween(100, 1000); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } // we want to test both full divergent copies of the shard in terms of segments, and // a case where they are the same (using sync flush), index Random does all this goodness diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java index 8418101bc7a09..55bdc2a4ac3c4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java @@ -128,8 +128,8 @@ public void testBulkWeirdScenario() throws Exception { 
ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex().setIndex("test").setType("_doc").setId("1").setSource("field1", "value1")) - .add(client().prepareUpdate().setIndex("test").setType("_doc").setId("1").setDoc("field2", "value2")) + .add(client().prepareIndex().setIndex("test").setId("1").setSource("field1", "value1")) + .add(client().prepareUpdate().setIndex("test").setId("1").setDoc("field2", "value2")) .execute() .actionGet(); @@ -150,7 +150,7 @@ public void testBulkWeirdScenario() throws Exception { // returns data paths settings of in-sync shard copy private Settings createStaleReplicaScenario(String master) throws Exception { - client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); refresh(); ClusterState state = client().admin().cluster().prepareState().all().get().getState(); List<ShardRouting> shards = state.routingTable().allShards("test"); @@ -177,7 +177,7 @@ private Settings createStaleReplicaScenario(String master) throws Exception { ensureStableCluster(2, master); logger.info("--> index a document into previous replica shard (that is now primary)"); - client(replicaNode).prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + client(replicaNode).prepareIndex("test").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); logger.info("--> shut down node that has new acknowledged document"); final Settings inSyncDataPathSettings = internalCluster().dataPathSettings(replicaNode); @@ -558,7 +558,7 @@ public void testRemoveAllocationIdOnWriteAfterNodeLeave() throws Exception { ensureYellow("test"); assertEquals(2, client().admin().cluster().prepareState().get().getState().metadata().index("test").inSyncAllocationIds(0).size()); logger.info("--> indexing..."); - client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); assertEquals(1, client().admin().cluster().prepareState().get().getState().metadata().index("test").inSyncAllocationIds(0).size()); internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { @Override @@ -595,7 +595,7 @@ public void testNotWaitForQuorumCopies() throws Exception { .get() ); ensureGreen("test"); - client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); logger.info("--> removing 2 nodes from cluster"); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodes.get(1), nodes.get(2))); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodes.get(1), nodes.get(2))); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java index 35a27f8b6b176..96f059695e719 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java +++
b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java @@ -302,7 +302,7 @@ private long createReasonableSizedShards(final String indexName) throws Interrup while (true) { final IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[scaledRandomIntBetween(100, 10000)]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex(indexName, "_doc").setSource("field", randomAlphaOfLength(10)); + indexRequestBuilders[i] = client().prepareIndex(indexName).setSource("field", randomAlphaOfLength(10)); } indexRandom(true, indexRequestBuilders); forceMerge(); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java index fea19fe450c7c..05b0f10be02f3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java @@ -212,7 +212,7 @@ public void testAutomaticReleaseOfIndexBlock() throws Exception { assertThat("node2 has 2 shards", shardCountByNodeId.get(nodeIds.get(2)), equalTo(2)); } - client().prepareIndex("test", "doc", "1").setSource("foo", "bar").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", "bar").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); assertSearchHits(client().prepareSearch("test").get(), "1"); // Move all nodes above the low watermark so no shard movement can occur, and at least one node above the flood stage watermark so @@ -227,7 +227,7 @@ public void testAutomaticReleaseOfIndexBlock() throws Exception { assertBusy( () -> assertBlocked( - client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar"), + client().prepareIndex().setIndex("test").setId("1").setSource("foo", "bar"), IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK ) ); @@ -236,7 +236,7 @@ public void testAutomaticReleaseOfIndexBlock() throws Exception { // Cannot add further documents assertBlocked( - client().prepareIndex().setIndex("test").setType("doc").setId("2").setSource("foo", "bar"), + client().prepareIndex().setIndex("test").setId("2").setSource("foo", "bar"), IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK ); assertSearchHits(client().prepareSearch("test").get(), "1"); @@ -249,7 +249,8 @@ public void testAutomaticReleaseOfIndexBlock() throws Exception { // Attempt to create a new document until DiskUsageMonitor unblocks the index assertBusy(() -> { try { - client().prepareIndex("test", "doc", "3") + client().prepareIndex("test") + .setId("3") .setSource("foo", "bar") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterSearchShardsIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterSearchShardsIT.java index f804648e82e91..86e83e01b008c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterSearchShardsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterSearchShardsIT.java @@ -173,7 +173,7 @@ public void testClusterSearchShardsWithBlocks() { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex("test-blocks", 
"type", "" + i).setSource("test", "init").execute().actionGet(); + client().prepareIndex("test-blocks").setId("" + i).setSource("test", "init").execute().actionGet(); } ensureGreen("test-blocks"); diff --git a/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionCleanSettingsIT.java b/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionCleanSettingsIT.java index 1754c61a1ab61..61a47d2bb0237 100644 --- a/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionCleanSettingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionCleanSettingsIT.java @@ -80,9 +80,7 @@ public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Excep final String node_2 = internalCluster().startDataOnlyNode(); List indexRequestBuilderList = new ArrayList<>(); for (int i = 0; i < 100; i++) { - indexRequestBuilderList.add( - client().prepareIndex().setIndex("test").setType("_doc").setSource("{\"int_field\":1}", XContentType.JSON) - ); + indexRequestBuilderList.add(client().prepareIndex().setIndex("test").setSource("{\"int_field\":1}", XContentType.JSON)); } indexRandom(true, indexRequestBuilderList); diff --git a/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java index 6da62ab5107c9..53002a38c3a9d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java @@ -171,7 +171,8 @@ public void testAckedIndexing() throws Exception { id = Integer.toString(idGenerator.incrementAndGet()); int shard = Math.floorMod(Murmur3HashFunction.hash(id), numPrimaries); logger.trace("[{}] indexing id [{}] through node [{}] targeting shard [{}]", name, id, node, shard); - IndexRequestBuilder indexRequestBuilder = client.prepareIndex("test", "type", id) + IndexRequestBuilder indexRequestBuilder = client.prepareIndex("test") + .setId(id) .setSource(Collections.singletonMap(randomFrom(fieldNames), randomNonNegativeLong()), XContentType.JSON) .setTimeout(timeout); @@ -308,10 +309,7 @@ public void testRejoinDocumentExistsInAllShardCopies() throws Exception { ensureStableCluster(2, notIsolatedNode); assertFalse(client(notIsolatedNode).admin().cluster().prepareHealth("test").setWaitForYellowStatus().get().isTimedOut()); - IndexResponse indexResponse = internalCluster().client(notIsolatedNode) - .prepareIndex("test", "type") - .setSource("field", "value") - .get(); + IndexResponse indexResponse = internalCluster().client(notIsolatedNode).prepareIndex("test").setSource("field", "value").get(); assertThat(indexResponse.getVersion(), equalTo(1L)); logger.info("Verifying if document exists via node[{}]", notIsolatedNode); @@ -514,7 +512,8 @@ public void testRestartNodeWhileIndexing() throws Exception { while (stopped.get() == false && docID.get() < 5000) { String id = Integer.toString(docID.incrementAndGet()); try { - IndexResponse response = client().prepareIndex(index, "_doc", id) + IndexResponse response = client().prepareIndex(index) + .setId(id) .setSource(Collections.singletonMap("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON) .get(); assertThat(response.getResult(), is(oneOf(CREATED, UPDATED))); diff --git a/server/src/internalClusterTest/java/org/opensearch/discovery/MasterDisruptionIT.java 
b/server/src/internalClusterTest/java/org/opensearch/discovery/MasterDisruptionIT.java index 06fc638b299aa..5f90e15701331 100644 --- a/server/src/internalClusterTest/java/org/opensearch/discovery/MasterDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/discovery/MasterDisruptionIT.java @@ -298,9 +298,9 @@ public void testMappingTimeout() throws Exception { disruption.startDisrupting(); BulkRequestBuilder bulk = client().prepareBulk(); - bulk.add(client().prepareIndex("test", "doc", "2").setSource("{ \"f\": 1 }", XContentType.JSON)); - bulk.add(client().prepareIndex("test", "doc", "3").setSource("{ \"g\": 1 }", XContentType.JSON)); - bulk.add(client().prepareIndex("test", "doc", "4").setSource("{ \"f\": 1 }", XContentType.JSON)); + bulk.add(client().prepareIndex("test").setId("2").setSource("{ \"f\": 1 }", XContentType.JSON)); + bulk.add(client().prepareIndex("test").setId("3").setSource("{ \"g\": 1 }", XContentType.JSON)); + bulk.add(client().prepareIndex("test").setId("4").setSource("{ \"f\": 1 }", XContentType.JSON)); BulkResponse bulkResponse = bulk.get(); assertTrue(bulkResponse.hasFailures()); diff --git a/server/src/internalClusterTest/java/org/opensearch/discovery/SnapshotDisruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/discovery/SnapshotDisruptionIT.java index 4fce66af21101..086aeb695c411 100644 --- a/server/src/internalClusterTest/java/org/opensearch/discovery/SnapshotDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/discovery/SnapshotDisruptionIT.java @@ -244,7 +244,7 @@ public void testMasterFailOverDuringShardSnapshots() throws Exception { final String indexName = "index-one"; createIndex(indexName); - client().prepareIndex(indexName, "_doc").setSource("foo", "bar").get(); + client().prepareIndex(indexName).setSource("foo", "bar").get(); blockDataNode(repoName, dataNode); @@ -294,7 +294,7 @@ private void createRandomIndex(String idxName) throws InterruptedException { final int numdocs = randomIntBetween(10, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numdocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(idxName, "type1", Integer.toString(i)).setSource("field1", "bar " + i); + builders[i] = client().prepareIndex(idxName).setId(Integer.toString(i)).setSource("field1", "bar " + i); } indexRandom(true, builders); } diff --git a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java index 4ca281fad157a..f285d8a6f291f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java @@ -83,7 +83,6 @@ public void testIndexActions() throws Exception { logger.info("Indexing [type1/1]"); IndexResponse indexResponse = client().prepareIndex() .setIndex("test") - .setType("type1") .setId("1") .setSource(source("1", "test")) .setRefreshPolicy(RefreshPolicy.IMMEDIATE) @@ -141,7 +140,7 @@ public void testIndexActions() throws Exception { } logger.info("Delete [type1/1]"); - DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "1").execute().actionGet(); + DeleteResponse deleteResponse = client().prepareDelete("test", "1").execute().actionGet(); assertThat(deleteResponse.getIndex(), equalTo(getConcreteIndexName())); assertThat(deleteResponse.getId(), equalTo("1")); logger.info("Refreshing"); @@ -211,12 +210,12 @@ public 
void testBulk() throws Exception { ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex().setIndex("test").setType("type1").setId("1").setSource(source("1", "test"))) - .add(client().prepareIndex().setIndex("test").setType("type1").setId("2").setSource(source("2", "test")).setCreate(true)) - .add(client().prepareIndex().setIndex("test").setType("type1").setSource(source("3", "test"))) - .add(client().prepareIndex().setIndex("test").setType("type1").setCreate(true).setSource(source("4", "test"))) - .add(client().prepareDelete().setIndex("test").setType("type1").setId("1")) - .add(client().prepareIndex().setIndex("test").setType("type1").setSource("{ xxx }", XContentType.JSON)) // failure + .add(client().prepareIndex().setIndex("test").setId("1").setSource(source("1", "test"))) + .add(client().prepareIndex().setIndex("test").setId("2").setSource(source("2", "test")).setCreate(true)) + .add(client().prepareIndex().setIndex("test").setSource(source("3", "test"))) + .add(client().prepareIndex().setIndex("test").setCreate(true).setSource(source("4", "test"))) + .add(client().prepareDelete().setIndex("test").setId("1")) + .add(client().prepareIndex().setIndex("test").setSource("{ xxx }", XContentType.JSON)) // failure .execute() .actionGet(); diff --git a/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java b/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java index 41b34516c30bb..be1335bd56ba9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java @@ -60,18 +60,15 @@ public class ShardInfoIT extends OpenSearchIntegTestCase { public void testIndexAndDelete() throws Exception { prepareIndex(1); - IndexResponse indexResponse = client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON).get(); + IndexResponse indexResponse = client().prepareIndex("idx").setSource("{}", XContentType.JSON).get(); assertShardInfo(indexResponse); - DeleteResponse deleteResponse = client().prepareDelete("idx", "type", indexResponse.getId()).get(); + DeleteResponse deleteResponse = client().prepareDelete("idx", indexResponse.getId()).get(); assertShardInfo(deleteResponse); } public void testUpdate() throws Exception { prepareIndex(1); - UpdateResponse updateResponse = client().prepareUpdate("idx", "type", "1") - .setDoc("{}", XContentType.JSON) - .setDocAsUpsert(true) - .get(); + UpdateResponse updateResponse = client().prepareUpdate("idx", "1").setDoc("{}", XContentType.JSON).setDocAsUpsert(true).get(); assertShardInfo(updateResponse); } @@ -79,7 +76,7 @@ public void testBulkWithIndexAndDeleteItems() throws Exception { prepareIndex(1); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); for (int i = 0; i < 10; i++) { - bulkRequestBuilder.add(client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("idx").setSource("{}", XContentType.JSON)); } BulkResponse bulkResponse = bulkRequestBuilder.get(); @@ -87,7 +84,7 @@ public void testBulkWithIndexAndDeleteItems() throws Exception { for (BulkItemResponse item : bulkResponse) { assertThat(item.isFailed(), equalTo(false)); assertShardInfo(item.getResponse()); - bulkRequestBuilder.add(client().prepareDelete("idx", "type", item.getId())); + bulkRequestBuilder.add(client().prepareDelete("idx", item.getId())); } bulkResponse = bulkRequestBuilder.get(); @@ -101,9 +98,7 @@ public void 
testBulkWithUpdateItems() throws Exception { prepareIndex(1); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); for (int i = 0; i < 10; i++) { - bulkRequestBuilder.add( - client().prepareUpdate("idx", "type", Integer.toString(i)).setDoc("{}", XContentType.JSON).setDocAsUpsert(true) - ); + bulkRequestBuilder.add(client().prepareUpdate("idx", Integer.toString(i)).setDoc("{}", XContentType.JSON).setDocAsUpsert(true)); } BulkResponse bulkResponse = bulkRequestBuilder.get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/env/NodeEnvironmentIT.java b/server/src/internalClusterTest/java/org/opensearch/env/NodeEnvironmentIT.java index fc38387b5e587..0bebcce27f975 100644 --- a/server/src/internalClusterTest/java/org/opensearch/env/NodeEnvironmentIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/env/NodeEnvironmentIT.java @@ -104,7 +104,7 @@ public Settings onNodeStopped(String nodeName) { internalCluster().startNode(dataPathSettings); logger.info("--> indexing a simple document"); - client().prepareIndex(indexName, "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex(indexName).setId("1").setSource("field1", "value1").get(); logger.info("--> restarting the node without the data role"); ex = expectThrows( diff --git a/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java b/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java index c90aa333604d3..2547333490f23 100644 --- a/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java @@ -65,7 +65,7 @@ public void testRepurpose() throws Exception { prepareCreate(indexName, Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).get(); logger.info("--> indexing a simple document"); - client().prepareIndex(indexName, "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex(indexName).setId("1").setSource("field1", "value1").get(); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java b/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java index 178a424d07a7c..78069970c1a60 100644 --- a/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java @@ -63,7 +63,7 @@ public void testSimple() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1))); ensureGreen("test"); - client().prepareIndex("test", "test", "1").setSource("field", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field", "value1").get(); ExplainResponse response = client().prepareExplain(indexOrAlias(), "1").setQuery(QueryBuilders.matchAllQuery()).get(); assertNotNull(response); @@ -120,7 +120,8 @@ public void testExplainWithFields() throws Exception { ); ensureGreen("test"); - client().prepareIndex("test", "test", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject().startObject("obj1").field("field1", "value1").field("field2", "value2").endObject().endObject() ) @@ -178,7 +179,8 @@ public void testExplainWithSource() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); ensureGreen("test"); - client().prepareIndex("test", "test", "1") + 
client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject().startObject("obj1").field("field1", "value1").field("field2", "value2").endObject().endObject() ) @@ -215,7 +217,7 @@ public void testExplainWithFilteredAlias() { ); ensureGreen("test"); - client().prepareIndex("test", "test", "1").setSource("field1", "value1", "field2", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value1").get(); refresh(); ExplainResponse response = client().prepareExplain("alias1", "1").setQuery(QueryBuilders.matchAllQuery()).get(); @@ -234,7 +236,7 @@ public void testExplainWithFilteredAliasFetchSource() throws Exception { ); ensureGreen("test"); - client().prepareIndex("test", "test", "1").setSource("field1", "value1", "field2", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value1").get(); refresh(); ExplainResponse response = client().prepareExplain("alias1", "1") @@ -261,7 +263,7 @@ public void testExplainDateRangeInQueryString() { String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1)); String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1)); - client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); + client().prepareIndex("test").setId("1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java index 4c0fa15a55824..1779fe025887a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java @@ -174,7 +174,7 @@ public void testSimpleOpenClose() throws Exception { ); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); logger.info("--> closing test index..."); assertAcked(client().admin().indices().prepareClose("test")); @@ -188,14 +188,14 @@ public void testSimpleOpenClose() throws Exception { logger.info("--> trying to index into a closed index ..."); try { - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").execute().actionGet(); fail(); } catch (IndexClosedException e) { // all is well } logger.info("--> creating another index (test2) by indexing into it"); - client().prepareIndex("test2", "type1", "1").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex("test2").setId("1").setSource("field1", "value1").execute().actionGet(); logger.info("--> verifying that the state is green"); ensureGreen(); @@ -234,7 +234,7 @@ public void testSimpleOpenClose() throws Exception { logger.info("--> trying to index into a closed index ..."); try { - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").execute().actionGet(); fail(); } catch (IndexClosedException e) { // all is well @@ -259,7 +259,7 @@ public void testSimpleOpenClose() throws Exception { assertThat(getResponse.isExists(), equalTo(true)); logger.info("--> indexing a simple document"); - 
client().prepareIndex("test", "type1", "2").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex("test").setId("2").setSource("field1", "value1").execute().actionGet(); } public void testJustMasterNode() throws Exception { @@ -304,7 +304,7 @@ public void testJustMasterNodeAndJustDataNode() { logger.info("--> create an index"); client().admin().indices().prepareCreate("test").execute().actionGet(); - client().prepareIndex("test", "type1").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex("test").setSource("field1", "value1").execute().actionGet(); } public void testTwoNodesSingleDoc() throws Exception { @@ -314,7 +314,7 @@ public void testTwoNodesSingleDoc() throws Exception { internalCluster().startNodes(2); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); ClusterHealthResponse health = client().admin() @@ -429,7 +429,7 @@ public void testRecoverBrokenIndexMetadata() throws Exception { logger.info("--> starting one node"); internalCluster().startNode(); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); @@ -516,7 +516,7 @@ public void testRecoverMissingAnalyzer() throws Exception { ) .get(); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value one").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value one").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); @@ -567,7 +567,7 @@ public void testRecoverMissingAnalyzer() throws Exception { public void testArchiveBrokenClusterSettings() throws Exception { logger.info("--> starting one node"); internalCluster().startNode(); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java index c6f1996bd7eff..2731eb9a290d6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java @@ -135,7 +135,6 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { client().admin() .indices() .preparePutMapping(index) - .setType("_doc") .setSource( jsonBuilder().startObject() .startObject("properties") @@ -147,11 +146,9 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { ) .get(); - GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes("_doc").get(); + GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).get(); 
assertNotNull( - ((Map) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get( - "integer_field" - ) + ((Map) (getMappingsResponse.getMappings().get(index).getSourceAsMap().get("properties"))).get("integer_field") ); // make sure it was also written on red node although index is closed @@ -175,7 +172,6 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { client().admin() .indices() .preparePutMapping(index) - .setType("_doc") .setSource( jsonBuilder().startObject() .startObject("properties") @@ -187,11 +183,9 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { ) .get(); - getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes("_doc").get(); + getMappingsResponse = client().admin().indices().prepareGetMappings(index).get(); assertNotNull( - ((Map) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get( - "float_field" - ) + ((Map) (getMappingsResponse.getMappings().get(index).getSourceAsMap().get("properties"))).get("float_field") ); // make sure it was also written on red node although index is closed diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/QuorumGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/QuorumGatewayIT.java index 624c2f1b51b9d..1e190d3bec345 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/QuorumGatewayIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/QuorumGatewayIT.java @@ -66,11 +66,11 @@ public void testQuorumRecovery() throws Exception { final NumShards test = getNumShards("test"); logger.info("--> indexing..."); - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); // We don't check for failures in the flush response: if we do we might get the following: // FlushNotAllowedEngineException[[test][1] recovery is in progress, flush [COMMIT_TRANSLOG] is not allowed] flush(); - client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).get(); + client().prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).get(); refresh(); for (int i = 0; i < 10; i++) { @@ -95,7 +95,8 @@ public void doAfterNodes(int numNodes, final Client activeClient) throws Excepti }, 30, TimeUnit.SECONDS); logger.info("--> one node is closed -- index 1 document into the remaining nodes"); - activeClient.prepareIndex("test", "type1", "3") + activeClient.prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("field", "value3").endObject()) .get(); assertNoFailures(activeClient.admin().indices().prepareRefresh().get()); diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java index 217c422f9335f..612abee7dbf5b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java @@ -126,23 +126,28 @@ public void testOneNodeRecoverFromGateway() throws Exception { ); assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); - 
client().prepareIndex("test", "type1", "10990239") + client().prepareIndex("test") + .setId("10990239") .setSource(jsonBuilder().startObject().startArray("appAccountIds").value(14).value(179).endArray().endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type1", "10990473") + client().prepareIndex("test") + .setId("10990473") .setSource(jsonBuilder().startObject().startArray("appAccountIds").value(14).endArray().endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type1", "10990513") + client().prepareIndex("test") + .setId("10990513") .setSource(jsonBuilder().startObject().startArray("appAccountIds").value(14).value(179).endArray().endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type1", "10990695") + client().prepareIndex("test") + .setId("10990695") .setSource(jsonBuilder().startObject().startArray("appAccountIds").value(14).endArray().endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type1", "11026351") + client().prepareIndex("test") + .setId("11026351") .setSource(jsonBuilder().startObject().startArray("appAccountIds").value(14).endArray().endObject()) .execute() .actionGet(); @@ -309,12 +314,14 @@ public void testSingleNodeNoFlush() throws Exception { public void testSingleNodeWithFlush() throws Exception { internalCluster().startNode(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("field", "value1").endObject()) .execute() .actionGet(); flush(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("field", "value2").endObject()) .execute() .actionGet(); @@ -352,12 +359,14 @@ public void testTwoNodeFirstNodeCleared() throws Exception { final String firstNode = internalCluster().startNode(); internalCluster().startNode(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("field", "value1").endObject()) .execute() .actionGet(); flush(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("field", "value2").endObject()) .execute() .actionGet(); @@ -408,12 +417,14 @@ public void testLatestVersionLoaded() throws Exception { Settings node2DataPathSettings = internalCluster().dataPathSettings(nodes.get(1)); assertAcked(client().admin().indices().prepareCreate("test")); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("field", "value1").endObject()) .execute() .actionGet(); client().admin().indices().prepareFlush().execute().actionGet(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("field", "value2").endObject()) .execute() .actionGet(); @@ -433,7 +444,8 @@ public void testLatestVersionLoaded() throws Exception { internalCluster().stopRandomDataNode(); logger.info("--> one node is closed - start indexing data into the second one"); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("field", "value3").endObject()) .execute() .actionGet(); @@ -530,7 +542,7 @@ public void testReuseInFileBasedPeerRecovery() throws Exception { logger.info("--> indexing docs"); int numDocs = randomIntBetween(1, 1024); for (int i = 
0; i < numDocs; i++) { - client(primaryNode).prepareIndex("test", "type").setSource("field", "value").execute().actionGet(); + client(primaryNode).prepareIndex("test").setSource("field", "value").execute().actionGet(); } client(primaryNode).admin().indices().prepareFlush("test").setForce(true).get(); @@ -563,7 +575,7 @@ public void testReuseInFileBasedPeerRecovery() throws Exception { public Settings onNodeStopped(String nodeName) throws Exception { // index some more documents; we expect to reuse the files that already exist on the replica for (int i = 0; i < moreDocs; i++) { - client(primaryNode).prepareIndex("test", "type").setSource("field", "value").execute().actionGet(); + client(primaryNode).prepareIndex("test").setSource("field", "value").execute().actionGet(); } // prevent a sequence-number-based recovery from being possible diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java index e9414fd651ca0..345ed668a3bf4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java @@ -113,7 +113,7 @@ public void testPreferCopyCanPerformNoopRecovery() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(100, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); client().admin().indices().prepareFlush(indexName).get(); @@ -123,7 +123,7 @@ public void testPreferCopyCanPerformNoopRecovery() throws Exception { false, randomBoolean(), IntStream.range(0, between(0, 80)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); } @@ -192,7 +192,7 @@ public void testRecentPrimaryInformation() throws Exception { false, randomBoolean(), IntStream.range(0, between(10, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodeWithReplica)); @@ -202,7 +202,7 @@ public void testRecentPrimaryInformation() throws Exception { false, randomBoolean(), IntStream.range(0, between(10, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); } @@ -231,7 +231,7 @@ public void testRecentPrimaryInformation() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(50, 200)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); client().admin().indices().prepareFlush(indexName).get(); @@ -288,7 +288,7 @@ public void testFullClusterRestartPerformNoopRecovery() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(200, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); client().admin().indices().prepareFlush(indexName).get(); @@ -297,7 
+297,7 @@ public void testFullClusterRestartPerformNoopRecovery() throws Exception { false, randomBoolean(), IntStream.range(0, between(0, 80)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); if (randomBoolean()) { @@ -350,7 +350,7 @@ public void testPreferCopyWithHighestMatchingOperations() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(200, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); client().admin().indices().prepareFlush(indexName).get(); @@ -364,7 +364,7 @@ public void testPreferCopyWithHighestMatchingOperations() throws Exception { false, randomBoolean(), IntStream.range(0, between(1, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); ensureActivePeerRecoveryRetentionLeasesAdvanced(indexName); @@ -376,7 +376,7 @@ public void testPreferCopyWithHighestMatchingOperations() throws Exception { false, randomBoolean(), IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); @@ -422,7 +422,7 @@ public void testDoNotCancelRecoveryForBrokenNode() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(200, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); client().admin().indices().prepareFlush(indexName).get(); @@ -474,7 +474,7 @@ public void testPeerRecoveryForClosedIndices() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, randomIntBetween(1, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(Collectors.toList()) ); ensureActivePeerRecoveryRetentionLeasesAdvanced(indexName); @@ -536,7 +536,7 @@ public void testSimulateRecoverySourceOnOldNode() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(200, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java index 327e35dbc7d0b..30cb18669ebbd 100644 --- a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java @@ -94,7 +94,7 @@ public void testSimpleGet() { assertThat(response.isExists(), equalTo(false)); logger.info("--> index doc 1"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); logger.info("--> non realtime get 1"); response = client().prepareGet(indexOrAlias(), "1").setRealtime(false).get(); @@ -181,7 +181,7 @@ public void testSimpleGet() { assertThat(response.getField("field2"), 
nullValue()); logger.info("--> update doc 1"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1_1", "field2", "value2_1").get(); logger.info("--> realtime get 1"); response = client().prepareGet(indexOrAlias(), "1").get(); @@ -191,7 +191,7 @@ public void testSimpleGet() { assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_1")); logger.info("--> update doc 1 again"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1_2", "field2", "value2_2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1_2", "field2", "value2_2").get(); response = client().prepareGet(indexOrAlias(), "1").get(); assertThat(response.isExists(), equalTo(true)); @@ -199,7 +199,7 @@ public void testSimpleGet() { assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_2")); assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_2")); - DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "1").get(); + DeleteResponse deleteResponse = client().prepareDelete("test", "1").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); response = client().prepareGet(indexOrAlias(), "1").get(); @@ -217,7 +217,7 @@ public void testGetWithAliasPointingToMultipleIndices() { } else { client().admin().indices().prepareCreate("index3").addAlias(new Alias("alias1").indexRouting("1").writeIndex(true)).get(); } - IndexResponse indexResponse = client().prepareIndex("index1", "type", "id").setSource(Collections.singletonMap("foo", "bar")).get(); + IndexResponse indexResponse = client().prepareIndex("index1").setId("id").setSource(Collections.singletonMap("foo", "bar")).get(); assertThat(indexResponse.status().getStatus(), equalTo(RestStatus.CREATED.getStatus())); IllegalArgumentException exception = expectThrows( @@ -244,7 +244,7 @@ public void testSimpleMultiGet() throws Exception { assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false)); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } response = client().prepareMultiGet() @@ -308,7 +308,7 @@ public void testGetDocWithMultivaluedFields() throws Exception { assertThat(response.isExists(), equalTo(false)); assertThat(response.isExists(), equalTo(false)); - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().array("field", "1", "2").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().array("field", "1", "2").endObject()).get(); response = client().prepareGet("test", "1").setStoredFields("field").get(); assertThat(response.isExists(), equalTo(true)); @@ -339,7 +339,7 @@ public void testGetWithVersion() { assertThat(response.isExists(), equalTo(false)); logger.info("--> index doc 1"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); // From translog: @@ -383,7 +383,7 @@ public void testGetWithVersion() { } logger.info("--> index doc 1 again, so increasing the version"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); + 
client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); // From translog: @@ -438,7 +438,7 @@ public void testMultiGetWithVersion() throws Exception { assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false)); for (int i = 0; i < 3; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } // Version from translog @@ -488,7 +488,7 @@ public void testMultiGetWithVersion() throws Exception { assertThat(response.getResponses()[2].getFailure().getFailure(), instanceOf(VersionConflictEngineException.class)); for (int i = 0; i < 3; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } // Version from translog @@ -562,7 +562,8 @@ public void testGetFieldsNonLeafField() throws Exception { .setSettings(Settings.builder().put("index.refresh_interval", -1)) ); - client().prepareIndex("test", "my-type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject()) .get(); @@ -640,7 +641,7 @@ public void testGetFieldsComplexField() throws Exception { logger.info("indexing documents"); - client().prepareIndex("my-index", "my-type", "1").setSource(source, XContentType.JSON).get(); + client().prepareIndex("my-index").setId("1").setSource(source, XContentType.JSON).get(); logger.info("checking real time retrieval"); @@ -732,7 +733,7 @@ public void testUngeneratedFieldsThatAreAlwaysStored() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource, XContentType.JSON)); ensureGreen(); - client().prepareIndex("test", "_doc", "1").setRouting("routingValue").setId("1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setRouting("routingValue").setId("1").setSource("{}", XContentType.JSON).get(); String[] fieldsList = { "_routing" }; // before refresh - document is only in translog @@ -756,7 +757,7 @@ public void testUngeneratedFieldsNotPartOfSourceStored() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource, XContentType.JSON)); ensureGreen(); String doc = "{\n" + " \"text\": \"some text.\"\n" + "}\n"; - client().prepareIndex("test", "_doc").setId("1").setSource(doc, XContentType.JSON).setRouting("1").get(); + client().prepareIndex("test").setId("1").setSource(doc, XContentType.JSON).setRouting("1").get(); String[] fieldsList = { "_routing" }; // before refresh - document is only in translog assertGetFieldsAlwaysWorks(indexOrAlias(), "_doc", "1", fieldsList, "1"); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java index 359d40e3b7b9f..3b2695ad7896e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java @@ -107,7 +107,7 @@ public void testFinalPipelineCantChangeDestination() { final IllegalStateException e = expectThrows( IllegalStateException.class, - () -> client().prepareIndex("index", "_doc").setId("1").setSource(Collections.singletonMap("field", "value")).get() + 
() -> client().prepareIndex("index").setId("1").setSource(Collections.singletonMap("field", "value")).get() ); assertThat(e, hasToString(containsString("final pipeline [final_pipeline] can't change the target index"))); } @@ -128,7 +128,7 @@ public void testFinalPipelineOfOldDestinationIsNotInvoked() { BytesReference finalPipelineBody = new BytesArray("{\"processors\": [{\"final\": {\"exists\":\"no_such_field\"}}]}"); client().admin().cluster().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); - IndexResponse indexResponse = client().prepareIndex("index", "_doc") + IndexResponse indexResponse = client().prepareIndex("index") .setId("1") .setSource(Collections.singletonMap("field", "value")) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -155,7 +155,7 @@ public void testFinalPipelineOfNewDestinationIsInvoked() { BytesReference finalPipelineBody = new BytesArray("{\"processors\": [{\"final\": {}}]}"); client().admin().cluster().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); - IndexResponse indexResponse = client().prepareIndex("index", "_doc") + IndexResponse indexResponse = client().prepareIndex("index") .setId("1") .setSource(Collections.singletonMap("field", "value")) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -185,7 +185,7 @@ public void testDefaultPipelineOfNewDestinationIsNotInvoked() { .putPipeline(new PutPipelineRequest("target_default_pipeline", targetPipeline, XContentType.JSON)) .actionGet(); - IndexResponse indexResponse = client().prepareIndex("index", "_doc") + IndexResponse indexResponse = client().prepareIndex("index") .setId("1") .setSource(Collections.singletonMap("field", "value")) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -203,7 +203,7 @@ public void testFinalPipeline() { // this asserts that the final_pipeline was used, without us having to actually create the pipeline etc. 
final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().prepareIndex("index", "_doc", "1").setSource(Collections.singletonMap("field", "value")).get() + () -> client().prepareIndex("index").setId("1").setSource(Collections.singletonMap("field", "value")).get() ); assertThat(e, hasToString(containsString("pipeline with id [final_pipeline] does not exist"))); } @@ -218,7 +218,7 @@ public void testRequestPipelineAndFinalPipeline() { client().admin().cluster().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); final Settings settings = Settings.builder().put(IndexSettings.FINAL_PIPELINE.getKey(), "final_pipeline").build(); createIndex("index", settings); - final IndexRequestBuilder index = client().prepareIndex("index", "_doc", "1"); + final IndexRequestBuilder index = client().prepareIndex("index").setId("1"); index.setSource(Collections.singletonMap("field", "value")); index.setPipeline("request_pipeline"); index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); @@ -247,7 +247,7 @@ public void testDefaultAndFinalPipeline() { .put(IndexSettings.FINAL_PIPELINE.getKey(), "final_pipeline") .build(); createIndex("index", settings); - final IndexRequestBuilder index = client().prepareIndex("index", "_doc", "1"); + final IndexRequestBuilder index = client().prepareIndex("index").setId("1"); index.setSource(Collections.singletonMap("field", "value")); index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); final IndexResponse response = index.get(); @@ -297,7 +297,7 @@ public void testDefaultAndFinalPipelineFromTemplates() { .setOrder(finalPipelineOrder) .setSettings(finalPipelineSettings) .get(); - final IndexRequestBuilder index = client().prepareIndex("index", "_doc", "1"); + final IndexRequestBuilder index = client().prepareIndex("index").setId("1"); index.setSource(Collections.singletonMap("field", "value")); index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); final IndexResponse response = index.get(); @@ -337,7 +337,7 @@ public void testHighOrderFinalPipelinePreferred() throws IOException { // this asserts that the high_order_final_pipeline was selected, without us having to actually create the pipeline etc. 
final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().prepareIndex("index", "_doc", "1").setSource(Collections.singletonMap("field", "value")).get() + () -> client().prepareIndex("index").setId("1").setSource(Collections.singletonMap("field", "value")).get() ); assertThat(e, hasToString(containsString("pipeline with id [high_order_final_pipeline] does not exist"))); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java index 9ee0347142c6e..54fbc8cecb967 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java @@ -63,7 +63,7 @@ public void testHiddenIndexSearch() { assertAcked( client().admin().indices().prepareCreate("hidden-index").setSettings(Settings.builder().put("index.hidden", true).build()).get() ); - client().prepareIndex("hidden-index", "_doc").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); // default not visible to wildcard expansion SearchResponse searchResponse = client().prepareSearch(randomFrom("*", "_all", "h*", "*index")) @@ -95,7 +95,7 @@ public void testHiddenIndexSearch() { .setSettings(Settings.builder().put("index.hidden", true).build()) .get() ); - client().prepareIndex(".hidden-index", "_doc").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex(".hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); searchResponse = client().prepareSearch(randomFrom(".*", ".hidden-*")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get(); matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> ".hidden-index".equals(hit.getIndex())); assertTrue(matchedHidden); @@ -160,7 +160,7 @@ public void testGlobalTemplatesDoNotApply() { GetMappingsResponse mappingsResponse = client().admin().indices().prepareGetMappings("a_hidden_index").get(); assertThat(mappingsResponse.mappings().size(), is(1)); - MappingMetadata mappingMetadata = mappingsResponse.mappings().get("a_hidden_index").get("_doc"); + MappingMetadata mappingMetadata = mappingsResponse.mappings().get("a_hidden_index"); assertNotNull(mappingMetadata); Map propertiesMap = (Map) mappingMetadata.getSourceAsMap().get("properties"); assertNotNull(propertiesMap); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/IndexRequestBuilderIT.java b/server/src/internalClusterTest/java/org/opensearch/index/IndexRequestBuilderIT.java index 75590686fdefe..9432f28a0a59e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/IndexRequestBuilderIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/IndexRequestBuilderIT.java @@ -53,13 +53,13 @@ public void testSetSource() throws InterruptedException, ExecutionException { Map map = new HashMap<>(); map.put("test_field", "foobar"); IndexRequestBuilder[] builders = new IndexRequestBuilder[] { - client().prepareIndex("test", "test").setSource((Object) "test_field", (Object) "foobar"), - client().prepareIndex("test", "test").setSource("{\"test_field\" : \"foobar\"}", XContentType.JSON), - client().prepareIndex("test", "test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}"), XContentType.JSON), - client().prepareIndex("test", 
"test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}"), XContentType.JSON), - client().prepareIndex("test", "test") + client().prepareIndex("test").setSource("test_field", "foobar"), + client().prepareIndex("test").setSource("{\"test_field\" : \"foobar\"}", XContentType.JSON), + client().prepareIndex("test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}"), XContentType.JSON), + client().prepareIndex("test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}"), XContentType.JSON), + client().prepareIndex("test") .setSource(BytesReference.toBytes(new BytesArray("{\"test_field\" : \"foobar\"}")), XContentType.JSON), - client().prepareIndex("test", "test").setSource(map) }; + client().prepareIndex("test").setSource(map) }; indexRandom(true, builders); SearchResponse searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.termQuery("test_field", "foobar")).get(); OpenSearchAssertions.assertHitCount(searchResponse, builders.length); @@ -67,7 +67,7 @@ public void testSetSource() throws InterruptedException, ExecutionException { public void testOddNumberOfSourceObjects() { try { - client().prepareIndex("test", "test").setSource("test_field", "foobar", new Object()); + client().prepareIndex("test").setSource("test_field", "foobar", new Object()); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("The number of object passed must be even but was [3]")); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java index 2972cfd7015a1..c0bc9d29af992 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java @@ -94,7 +94,8 @@ public void testIndexSort() { .putList("index.sort.field", "date", "numeric_dv", "keyword_dv") ).addMapping("test", TEST_MAPPING).get(); for (int i = 0; i < 20; i++) { - client().prepareIndex("test", "test", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("numeric_dv", randomInt(), "keyword_dv", randomAlphaOfLengthBetween(10, 20)) .get(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/WaitUntilRefreshIT.java b/server/src/internalClusterTest/java/org/opensearch/index/WaitUntilRefreshIT.java index 40e6819ee9f9a..e38b128c04fde 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/WaitUntilRefreshIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/WaitUntilRefreshIT.java @@ -83,7 +83,8 @@ public void createTestIndex() { } public void testIndex() { - IndexResponse index = client().prepareIndex("test", "index", "1") + IndexResponse index = client().prepareIndex("test") + .setId("1") .setSource("foo", "bar") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .get(); @@ -94,11 +95,11 @@ public void testIndex() { public void testDelete() throws InterruptedException, ExecutionException { // Index normally - indexRandom(true, client().prepareIndex("test", "test", "1").setSource("foo", "bar")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("foo", "bar")); assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get(), "1"); // Now delete with blockUntilRefresh - DeleteResponse delete = client().prepareDelete("test", "test", "1").setRefreshPolicy(RefreshPolicy.WAIT_UNTIL).get(); + DeleteResponse delete = 
client().prepareDelete("test", "1").setRefreshPolicy(RefreshPolicy.WAIT_UNTIL).get(); assertEquals(DocWriteResponse.Result.DELETED, delete.getResult()); assertFalse("request shouldn't have forced a refresh", delete.forcedRefresh()); assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get()); @@ -106,11 +107,11 @@ public void testDelete() throws InterruptedException, ExecutionException { public void testUpdate() throws InterruptedException, ExecutionException { // Index normally - indexRandom(true, client().prepareIndex("test", "test", "1").setSource("foo", "bar")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("foo", "bar")); assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get(), "1"); // Update with RefreshPolicy.WAIT_UNTIL - UpdateResponse update = client().prepareUpdate("test", "test", "1") + UpdateResponse update = client().prepareUpdate("test", "1") .setDoc(Requests.INDEX_CONTENT_TYPE, "foo", "baz") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .get(); @@ -119,7 +120,7 @@ public void testUpdate() throws InterruptedException, ExecutionException { assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "baz")).get(), "1"); // Upsert with RefreshPolicy.WAIT_UNTIL - update = client().prepareUpdate("test", "test", "2") + update = client().prepareUpdate("test", "2") .setDocAsUpsert(true) .setDoc(Requests.INDEX_CONTENT_TYPE, "foo", "cat") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) @@ -129,7 +130,7 @@ public void testUpdate() throws InterruptedException, ExecutionException { assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "cat")).get(), "2"); // Update-becomes-delete with RefreshPolicy.WAIT_UNTIL - update = client().prepareUpdate("test", "test", "2") + update = client().prepareUpdate("test", "2") .setScript(new Script(ScriptType.INLINE, "mockscript", "delete_plz", emptyMap())) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .get(); @@ -141,25 +142,25 @@ public void testUpdate() throws InterruptedException, ExecutionException { public void testBulk() { // Index by bulk with RefreshPolicy.WAIT_UNTIL BulkRequestBuilder bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); - bulk.add(client().prepareIndex("test", "test", "1").setSource("foo", "bar")); + bulk.add(client().prepareIndex("test").setId("1").setSource("foo", "bar")); assertBulkSuccess(bulk.get()); assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get(), "1"); // Update by bulk with RefreshPolicy.WAIT_UNTIL bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); - bulk.add(client().prepareUpdate("test", "test", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "foo", "baz")); + bulk.add(client().prepareUpdate("test", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "foo", "baz")); assertBulkSuccess(bulk.get()); assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "baz")).get(), "1"); // Delete by bulk with RefreshPolicy.WAIT_UNTIL bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); - bulk.add(client().prepareDelete("test", "test", "1")); + bulk.add(client().prepareDelete("test", "1")); assertBulkSuccess(bulk.get()); assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get()); // Update makes a noop bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); - bulk.add(client().prepareDelete("test", "test", "1")); + bulk.add(client().prepareDelete("test", "1")); 
assertBulkSuccess(bulk.get()); } @@ -169,7 +170,8 @@ public void testBulk() { */ public void testNoRefreshInterval() throws InterruptedException, ExecutionException { client().admin().indices().prepareUpdateSettings("test").setSettings(singletonMap("index.refresh_interval", -1)).get(); - ActionFuture index = client().prepareIndex("test", "index", "1") + ActionFuture index = client().prepareIndex("test") + .setId("1") .setSource("foo", "bar") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .execute(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java b/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java index 95c03a306a897..da3b30030581f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java @@ -123,7 +123,7 @@ public void testMaxDocsLimit() throws Exception { assertThat(indexingResult.numSuccess, equalTo(0)); final IllegalArgumentException deleteError = expectThrows( IllegalArgumentException.class, - () -> client().prepareDelete("test", "_doc", "any-id").get() + () -> client().prepareDelete("test", "any-id").get() ); assertThat(deleteError.getMessage(), containsString("Number of documents in the index can't exceed [" + maxDocs.get() + "]")); client().admin().indices().prepareRefresh("test").get(); @@ -206,7 +206,7 @@ static IndexingResult indexDocs(int numRequests, int numThreads) throws Exceptio phaser.arriveAndAwaitAdvance(); while (completedRequests.incrementAndGet() <= numRequests) { try { - final IndexResponse resp = client().prepareIndex("test", "_doc").setSource("{}", XContentType.JSON).get(); + final IndexResponse resp = client().prepareIndex("test").setSource("{}", XContentType.JSON).get(); numSuccess.incrementAndGet(); assertThat(resp.status(), equalTo(RestStatus.CREATED)); } catch (IllegalArgumentException e) { diff --git a/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java b/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java index a60b8241d5ea5..0aa2abed14b79 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java @@ -60,7 +60,7 @@ public void testEagerGlobalOrdinalsFieldDataLoading() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("name", "name").get(); + client().prepareIndex("test").setId("1").setSource("name", "name").get(); client().admin().indices().prepareRefresh("test").get(); ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java index 192be5f3c4369..f2cc3c289e8e4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java @@ -56,7 +56,7 @@ public void testDynamicTemplateCopyTo() throws Exception { int recordCount = between(1, 200); for (int i = 0; i < recordCount * 2; i++) { - client().prepareIndex("test-idx", "_doc", Integer.toString(i)).setSource("test_field", "test " + i, "even", i % 2 == 0).get(); + 
client().prepareIndex("test-idx").setId(Integer.toString(i)).setSource("test_field", "test " + i, "even", i % 2 == 0).get(); } client().admin().indices().prepareRefresh("test-idx").execute().actionGet(); @@ -92,7 +92,7 @@ public void testDynamicObjectCopyTo() throws Exception { .endObject() ); assertAcked(client().admin().indices().prepareCreate("test-idx").addMapping("_doc", mapping, XContentType.JSON)); - client().prepareIndex("test-idx", "_doc", "1").setSource("foo", "bar").get(); + client().prepareIndex("test-idx").setId("1").setSource("foo", "bar").get(); client().admin().indices().prepareRefresh("test-idx").execute().actionGet(); SearchResponse response = client().prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("root.top.child", "bar")).get(); assertThat(response.getHits().getTotalHits().value, equalTo(1L)); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java index cb01295ae734c..d5924155e2ec7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java @@ -38,7 +38,6 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.ClusterStateUpdateTask; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -68,9 +67,9 @@ protected Collection> nodePlugins() { public void testConflictingDynamicMappings() { // we don't use indexRandom because the order of requests is important here createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("foo", 3).get(); + client().prepareIndex("index").setId("1").setSource("foo", 3).get(); try { - client().prepareIndex("index", "type", "2").setSource("foo", "bar").get(); + client().prepareIndex("index").setId("2").setSource("foo", "bar").get(); fail("Indexing request should have failed!"); } catch (MapperParsingException e) { // general case, the parsing code complains that it can't parse "bar" as a "long" @@ -86,19 +85,17 @@ public void testConflictingDynamicMappings() { public void testConflictingDynamicMappingsBulk() { // we don't use indexRandom because the order of requests is important here createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("foo", 3).get(); - BulkResponse bulkResponse = client().prepareBulk().add(client().prepareIndex("index", "type", "1").setSource("foo", 3)).get(); + client().prepareIndex("index").setId("1").setSource("foo", 3).get(); + BulkResponse bulkResponse = client().prepareBulk().add(client().prepareIndex("index").setId("1").setSource("foo", 3)).get(); assertFalse(bulkResponse.hasFailures()); - bulkResponse = client().prepareBulk().add(client().prepareIndex("index", "type", "2").setSource("foo", "bar")).get(); + bulkResponse = client().prepareBulk().add(client().prepareIndex("index").setId("2").setSource("foo", "bar")).get(); assertTrue(bulkResponse.hasFailures()); } private static void assertMappingsHaveField(GetMappingsResponse mappings, String index, String field) throws IOException { - ImmutableOpenMap indexMappings = mappings.getMappings().get("index"); + MappingMetadata indexMappings = mappings.getMappings().get("index"); assertNotNull(indexMappings); - MappingMetadata 
typeMappings = indexMappings.get(MapperService.SINGLE_MAPPING_NAME); - assertNotNull(typeMappings); - Map typeMappingsMap = typeMappings.getSourceAsMap(); + Map typeMappingsMap = indexMappings.getSourceAsMap(); Map properties = (Map) typeMappingsMap.get("properties"); assertTrue("Could not find [" + field + "] in " + typeMappingsMap.toString(), properties.containsKey(field)); } @@ -117,7 +114,7 @@ public void run() { startLatch.await(); assertEquals( DocWriteResponse.Result.CREATED, - client().prepareIndex("index", "type", id).setSource("field" + id, "bar").get().getResult() + client().prepareIndex("index").setId(id).setSource("field" + id, "bar").get().getResult() ); } catch (Exception e) { error.compareAndSet(null, e); @@ -146,7 +143,7 @@ public void run() { public void testPreflightCheckAvoidsMaster() throws InterruptedException { createIndex("index", Settings.builder().put(INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), 2).build()); ensureGreen("index"); - client().prepareIndex("index", MapperService.SINGLE_MAPPING_NAME).setId("1").setSource("field1", "value1").get(); + client().prepareIndex("index").setId("1").setSource("field1", "value1").get(); final CountDownLatch masterBlockedLatch = new CountDownLatch(1); final CountDownLatch indexingCompletedLatch = new CountDownLatch(1); @@ -167,9 +164,7 @@ public void onFailure(String source, Exception e) { }); masterBlockedLatch.await(); - final IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index", MapperService.SINGLE_MAPPING_NAME) - .setId("2") - .setSource("field2", "value2"); + final IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index").setId("2").setSource("field2", "value2"); try { assertThat( expectThrows(IllegalArgumentException.class, () -> indexRequestBuilder.get(TimeValue.timeValueSeconds(10))).getMessage(), @@ -184,7 +179,7 @@ public void testMappingVersionAfterDynamicMappingUpdate() throws Exception { createIndex("test"); final ClusterService clusterService = internalCluster().clusterService(); final long previousVersion = clusterService.state().metadata().index("test").getMappingVersion(); - client().prepareIndex("test", "_doc").setId("1").setSource("field", "text").get(); + client().prepareIndex("test").setId("1").setSource("field", "text").get(); assertBusy(() -> assertThat(clusterService.state().metadata().index("test").getMappingVersion(), equalTo(1 + previousVersion))); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java index 19a9265dc190c..c9f3ddbc9e8b1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java @@ -61,7 +61,7 @@ public void testMultiFields() throws Exception { assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createTypeSource())); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); - MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index").get("my-type"); + MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); Map titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource)); @@ -69,17 +69,17 @@ 
public void testMultiFields() throws Exception { assertThat(titleFields.get("not_analyzed"), notNullValue()); assertThat(((Map) titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword")); - client().prepareIndex("my-index", "my-type", "1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("my-index").setId("1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title", "multi")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title.not_analyzed", "Multi fields")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertAcked(client().admin().indices().preparePutMapping("my-index").setType("my-type").setSource(createPutMappingSource())); + assertAcked(client().admin().indices().preparePutMapping("my-index").setSource(createPutMappingSource())); getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); - mappingMetadata = getMappingsResponse.mappings().get("my-index").get("my-type"); + mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); mappingSource = mappingMetadata.sourceAsMap(); assertThat(((Map) XContentMapValues.extractValue("properties.title", mappingSource)).size(), equalTo(2)); @@ -90,7 +90,7 @@ public void testMultiFields() throws Exception { assertThat(titleFields.get("uncased"), notNullValue()); assertThat(((Map) titleFields.get("uncased")).get("analyzer").toString(), equalTo("whitespace")); - client().prepareIndex("my-index", "my-type", "1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("my-index").setId("1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get(); searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title.uncased", "Multi")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -101,7 +101,7 @@ public void testGeoPointMultiField() throws Exception { assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createMappingSource("geo_point"))); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); - MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index").get("my-type"); + MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); @@ -115,7 +115,7 @@ public void testGeoPointMultiField() throws Exception { assertThat(bField.get("type").toString(), equalTo("keyword")); GeoPoint point = new GeoPoint(51, 19); - client().prepareIndex("my-index", "my-type", "1").setSource("a", point.toString()).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("my-index").setId("1").setSource("a", point.toString()).setRefreshPolicy(IMMEDIATE).get(); SearchResponse countResponse = client().prepareSearch("my-index") .setSize(0) .setQuery(constantScoreQuery(geoDistanceQuery("a").point(51, 19).distance(50, DistanceUnit.KILOMETERS))) @@ -130,7 +130,7 @@ public void testCompletionMultiField() throws Exception { 
assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createMappingSource("completion"))); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); - MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index").get("my-type"); + MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); @@ -142,7 +142,7 @@ public void testCompletionMultiField() throws Exception { assertThat(bField.size(), equalTo(1)); assertThat(bField.get("type").toString(), equalTo("keyword")); - client().prepareIndex("my-index", "my-type", "1").setSource("a", "complete me").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("my-index").setId("1").setSource("a", "complete me").setRefreshPolicy(IMMEDIATE).get(); SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "complete me")).get(); assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L)); } @@ -152,7 +152,7 @@ public void testIpMultiField() throws Exception { assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createMappingSource("ip"))); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); - MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index").get("my-type"); + MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); @@ -164,7 +164,7 @@ public void testIpMultiField() throws Exception { assertThat(bField.size(), equalTo(1)); assertThat(bField.get("type").toString(), equalTo("keyword")); - client().prepareIndex("my-index", "my-type", "1").setSource("a", "127.0.0.1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("my-index").setId("1").setSource("a", "127.0.0.1").setRefreshPolicy(IMMEDIATE).get(); SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "127.0.0.1")).get(); assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L)); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/query/plugin/CustomQueryParserIT.java b/server/src/internalClusterTest/java/org/opensearch/index/query/plugin/CustomQueryParserIT.java index 1d30cef96e012..60e9e28b65005 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/query/plugin/CustomQueryParserIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/query/plugin/CustomQueryParserIT.java @@ -54,7 +54,7 @@ public void setUp() throws Exception { super.setUp(); createIndex("test"); ensureGreen(); - client().prepareIndex("index", "type", "1").setSource("field", "value").get(); + client().prepareIndex("index").setId("1").setSource("field", "value").get(); refresh(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java index d4f4f79dc3408..6d76ee48a5b95 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java @@ -82,8 +82,8 @@ public void testZeroTermsQuery() throws ExecutionException, InterruptedException private List getIndexRequests() { List requests = new ArrayList<>(); - requests.add(client().prepareIndex(INDEX, "band").setSource("name", "the beatles")); - requests.add(client().prepareIndex(INDEX, "band").setSource("name", "led zeppelin")); + requests.add(client().prepareIndex(INDEX).setSource("name", "the beatles")); + requests.add(client().prepareIndex(INDEX).setSource("name", "led zeppelin")); return requests; } } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/seqno/GlobalCheckpointSyncIT.java b/server/src/internalClusterTest/java/org/opensearch/index/seqno/GlobalCheckpointSyncIT.java index 7ed7c36cb3449..ce7cb81dbd2df 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/seqno/GlobalCheckpointSyncIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/seqno/GlobalCheckpointSyncIT.java @@ -82,7 +82,7 @@ public void testGlobalCheckpointSyncWithAsyncDurability() throws Exception { for (int j = 0; j < 10; j++) { final String id = Integer.toString(j); - client().prepareIndex("test", "test", id).setSource("{\"foo\": " + id + "}", XContentType.JSON).get(); + client().prepareIndex("test").setId(id).setSource("{\"foo\": " + id + "}", XContentType.JSON).get(); } assertBusy(() -> { @@ -194,7 +194,7 @@ private void runGlobalCheckpointSyncTest( } for (int j = 0; j < numberOfDocuments; j++) { final String id = Integer.toString(index * numberOfDocuments + j); - client().prepareIndex("test", "test", id).setSource("{\"foo\": " + id + "}", XContentType.JSON).get(); + client().prepareIndex("test").setId(id).setSource("{\"foo\": " + id + "}", XContentType.JSON).get(); } try { barrier.await(); @@ -251,7 +251,7 @@ public void testPersistGlobalCheckpoint() throws Exception { } int numDocs = randomIntBetween(1, 20); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "test", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); } ensureGreen("test"); assertBusy(() -> { @@ -281,7 +281,7 @@ public void testPersistLocalCheckpoint() { logger.info("numDocs {}", numDocs); long maxSeqNo = 0; for (int i = 0; i < numDocs; i++) { - maxSeqNo = client().prepareIndex("test", "_doc").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get().getSeqNo(); + maxSeqNo = client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get().getSeqNo(); logger.info("got {}", maxSeqNo); } for (IndicesService indicesService : internalCluster().getDataNodeInstances(IndicesService.class)) { diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/GlobalCheckpointListenersIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/GlobalCheckpointListenersIT.java index 6fbf218ae0542..a2c5c0333bbfe 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/GlobalCheckpointListenersIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/GlobalCheckpointListenersIT.java @@ -88,7 +88,7 @@ public void accept(final long g, final Exception e) { } }, null); - client().prepareIndex("test", "_doc", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); assertBusy(() 
-> assertThat(globalCheckpoint.get(), equalTo((long) index))); // adding a listener expecting a lower global checkpoint should fire immediately final AtomicLong immediateGlobalCheckpint = new AtomicLong(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java index fa0fcfdbea628..61101129b9b16 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java @@ -173,7 +173,7 @@ public void testLockTryingToDelete() throws Exception { public void testDurableFlagHasEffect() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "bar", "1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).get(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); @@ -193,20 +193,20 @@ public void testDurableFlagHasEffect() throws Exception { setDurability(shard, Translog.Durability.REQUEST); assertFalse(needsSync.test(translog)); setDurability(shard, Translog.Durability.ASYNC); - client().prepareIndex("test", "bar", "2").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("2").setSource("{}", XContentType.JSON).get(); assertTrue(needsSync.test(translog)); setDurability(shard, Translog.Durability.REQUEST); - client().prepareDelete("test", "bar", "1").get(); + client().prepareDelete("test", "1").get(); assertFalse(needsSync.test(translog)); setDurability(shard, Translog.Durability.ASYNC); - client().prepareDelete("test", "bar", "2").get(); + client().prepareDelete("test", "2").get(); assertTrue(translog.syncNeeded()); setDurability(shard, Translog.Durability.REQUEST); assertNoFailures( client().prepareBulk() - .add(client().prepareIndex("test", "bar", "3").setSource("{}", XContentType.JSON)) - .add(client().prepareDelete("test", "bar", "1")) + .add(client().prepareIndex("test").setId("3").setSource("{}", XContentType.JSON)) + .add(client().prepareDelete("test", "1")) .get() ); assertFalse(needsSync.test(translog)); @@ -214,8 +214,8 @@ public void testDurableFlagHasEffect() throws Exception { setDurability(shard, Translog.Durability.ASYNC); assertNoFailures( client().prepareBulk() - .add(client().prepareIndex("test", "bar", "4").setSource("{}", XContentType.JSON)) - .add(client().prepareDelete("test", "bar", "3")) + .add(client().prepareIndex("test").setId("4").setSource("{}", XContentType.JSON)) + .add(client().prepareDelete("test", "3")) .get() ); setDurability(shard, Translog.Durability.REQUEST); @@ -252,7 +252,7 @@ public void testIndexDirIsDeletedWhenShardRemoved() throws Exception { Settings idxSettings = Settings.builder().put(IndexMetadata.SETTING_DATA_PATH, idxPath).build(); createIndex("test", idxSettings); ensureGreen("test"); - client().prepareIndex("test", "bar", "1").setSource("{}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get(); SearchResponse response = client().prepareSearch("test").get(); assertHitCount(response, 1L); client().admin().indices().prepareDelete("test").get(); @@ -268,7 +268,7 @@ public void testExpectedShardSizeIsPresent() throws InterruptedException { 
.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) ); for (int i = 0; i < 50; i++) { - client().prepareIndex("test", "test").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setSource("{}", XContentType.JSON).get(); } ensureGreen("test"); InternalClusterInfoService clusterInfoService = (InternalClusterInfoService) getInstanceFromNode(ClusterInfoService.class); @@ -287,7 +287,7 @@ public void testIndexCanChangeCustomDataPath() throws Exception { logger.info("--> creating index [{}] with data_path [{}]", index, indexDataPath); createIndex(index, Settings.builder().put(IndexMetadata.SETTING_DATA_PATH, indexDataPath.toAbsolutePath().toString()).build()); - client().prepareIndex(index, "bar", "1").setSource("foo", "bar").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex(index).setId("1").setSource("foo", "bar").setRefreshPolicy(IMMEDIATE).get(); ensureGreen(index); assertHitCount(client().prepareSearch(index).setSize(0).get(), 1L); @@ -366,7 +366,7 @@ public void testMaybeFlush() throws Exception { .build() ) .get(); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("0") .setSource("{}", XContentType.JSON) .setRefreshPolicy(randomBoolean() ? IMMEDIATE : NONE) @@ -385,7 +385,8 @@ public void testMaybeFlush() throws Exception { final Translog translog = getTranslog(shard); assertEquals(2, translog.stats().getUncommittedOperations()); assertThat(shard.flushStats().getTotal(), equalTo(0L)); - client().prepareIndex("test", "_doc", "2") + client().prepareIndex("test") + .setId("2") .setSource("{}", XContentType.JSON) .setRefreshPolicy(randomBoolean() ? IMMEDIATE : NONE) .get(); @@ -414,7 +415,7 @@ public void testMaybeFlush() throws Exception { .build() ) .get(); - client().prepareDelete("test", "_doc", "2").get(); + client().prepareDelete("test", "2").get(); logger.info( "--> translog size after delete: [{}] num_ops [{}] generation [{}]", translog.stats().getUncommittedSizeInBytes(), @@ -493,7 +494,8 @@ public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { settings = Settings.builder().put("index.translog.generation_threshold_size", "117b").build(); } client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get(); - client().prepareIndex("test", "test", "0") + client().prepareIndex("test") + .setId("0") .setSource("{}", XContentType.JSON) .setRefreshPolicy(randomBoolean() ? 
IMMEDIATE : NONE) .get(); @@ -519,7 +521,7 @@ public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { final CheckedRunnable check; if (flush) { final FlushStats initialStats = shard.flushStats(); - client().prepareIndex("test", "test", "1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).get(); check = () -> { assertFalse(shard.shouldPeriodicallyFlush()); final FlushStats currentStats = shard.flushStats(); @@ -544,7 +546,7 @@ public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { }; } else { final long generation = getTranslog(shard).currentFileGeneration(); - client().prepareIndex("test", "test", "1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).get(); check = () -> { assertFalse(shard.shouldRollTranslogGeneration()); assertEquals(generation + 1, getTranslog(shard).currentFileGeneration()); @@ -565,7 +567,7 @@ public void testFlushStats() throws Exception { client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get(); final int numDocs = between(10, 100); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "doc", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); } // A flush stats may include the new total count but the old period count - assert eventually. assertBusy(() -> { @@ -576,7 +578,7 @@ public void testFlushStats() throws Exception { settings = Settings.builder().put("index.translog.flush_threshold_size", (String) null).build(); client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get(); - client().prepareIndex("test", "doc", UUIDs.randomBase64UUID()).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(UUIDs.randomBase64UUID()).setSource("{}", XContentType.JSON).get(); client().admin().indices().prepareFlush("test").setForce(randomBoolean()).setWaitIfOngoing(true).get(); final FlushStats flushStats = client().admin().indices().prepareStats("test").clear().setFlush(true).get().getTotal().flush; assertThat(flushStats.getTotal(), greaterThan(flushStats.getPeriodic())); @@ -588,9 +590,9 @@ public void testShardHasMemoryBufferOnTranslogRecover() throws Throwable { IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService indexService = indicesService.indexService(resolveIndex("test")); IndexShard shard = indexService.getShardOrNull(0); - client().prepareIndex("test", "test", "0").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); - client().prepareDelete("test", "test", "0").get(); - client().prepareIndex("test", "test", "1").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("0").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); + client().prepareDelete("test", "0").get(); + client().prepareIndex("test").setId("1").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get(); CheckedFunction wrapper = directoryReader -> directoryReader; shard.close("simon says", false); @@ -703,7 +705,7 @@ public void testInvalidateIndicesRequestCacheWhenRollbackEngine() throws Excepti final SearchRequest countRequest = new SearchRequest("test").source(new SearchSourceBuilder().size(0)); final long numDocs = between(10, 20); for (int i = 0; i < numDocs; i++) { - 
client().prepareIndex("test", "_doc", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); if (randomBoolean()) { shard.refresh("test"); } @@ -725,7 +727,7 @@ public void testInvalidateIndicesRequestCacheWhenRollbackEngine() throws Excepti final long moreDocs = between(10, 20); for (int i = 0; i < moreDocs; i++) { - client().prepareIndex("test", "_doc", Long.toString(i + numDocs)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Long.toString(i + numDocs)).setSource("{}", XContentType.JSON).get(); if (randomBoolean()) { shard.refresh("test"); } @@ -756,11 +758,9 @@ public void testShardChangesWithDefaultDocType() throws Exception { int numOps = between(1, 10); for (int i = 0; i < numOps; i++) { if (randomBoolean()) { - client().prepareIndex("index", randomFrom("_doc", "user_doc"), randomFrom("1", "2")) - .setSource("{}", XContentType.JSON) - .get(); + client().prepareIndex("index").setId(randomFrom("1", "2")).setSource("{}", XContentType.JSON).get(); } else { - client().prepareDelete("index", randomFrom("_doc", "user_doc"), randomFrom("1", "2")).get(); + client().prepareDelete("index", randomFrom("1", "2")).get(); } } IndexShard shard = indexService.getShard(0); @@ -821,7 +821,7 @@ public void testLimitNumberOfRetainedTranslogFiles() throws Exception { } }; for (int i = 0; i < 100; i++) { - client().prepareIndex(indexName, "_doc", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex(indexName).setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); if (randomInt(100) < 10) { client().admin().indices().prepareFlush(indexName).setWaitIfOngoing(true).get(); checkTranslog.run(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java index 6f2964769f2a7..2dc241e278768 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java @@ -150,7 +150,7 @@ public void testCorruptIndex() throws Exception { final int numExtraDocs = between(10, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numExtraDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource("foo", "bar"); + builders[i] = client().prepareIndex(indexName).setSource("foo", "bar"); } numDocs += numExtraDocs; @@ -326,7 +326,7 @@ public void testCorruptTranslogTruncation() throws Exception { logger.info("--> indexing [{}] docs to be kept", numDocsToKeep); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocsToKeep]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource("foo", "bar"); + builders[i] = client().prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(false, false, false, Arrays.asList(builders)); flush(indexName); @@ -337,7 +337,7 @@ public void testCorruptTranslogTruncation() throws Exception { logger.info("--> indexing [{}] more doc to be truncated", numDocsToTruncate); builders = new IndexRequestBuilder[numDocsToTruncate]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource("foo", "bar"); + builders[i] = 
client().prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(false, false, false, Arrays.asList(builders)); @@ -529,7 +529,7 @@ public void testCorruptTranslogTruncationOfReplica() throws Exception { logger.info("--> indexing [{}] docs to be kept", numDocsToKeep); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocsToKeep]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource("foo", "bar"); + builders[i] = client().prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(false, false, false, Arrays.asList(builders)); flush(indexName); @@ -539,7 +539,7 @@ public void testCorruptTranslogTruncationOfReplica() throws Exception { logger.info("--> indexing [{}] more docs to be truncated", numDocsToTruncate); builders = new IndexRequestBuilder[numDocsToTruncate]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource("foo", "bar"); + builders[i] = client().prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(false, false, false, Arrays.asList(builders)); final int totalDocs = numDocsToKeep + numDocsToTruncate; diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/SearchIdleIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/SearchIdleIT.java index 21d56ef53c26c..9382960b906e3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/SearchIdleIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/SearchIdleIT.java @@ -102,7 +102,7 @@ private void runTestAutomaticRefresh(final IntToLongFunction count) throws Inter int numDocs = scaledRandomIntBetween(25, 100); totalNumDocs.set(numDocs); CountDownLatch indexingDone = new CountDownLatch(numDocs); - client().prepareIndex("test", "test", "0").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("0").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); indexingDone.countDown(); // one doc is indexed above blocking IndexShard shard = indexService.getShard(0); boolean hasRefreshed = shard.scheduledRefresh(); @@ -133,7 +133,8 @@ private void runTestAutomaticRefresh(final IntToLongFunction count) throws Inter started.await(); assertThat(count.applyAsLong(totalNumDocs.get()), equalTo(1L)); for (int i = 1; i < numDocs; i++) { - client().prepareIndex("test", "test", "" + i) + client().prepareIndex("test") + .setId("" + i) .setSource("{\"foo\" : \"bar\"}", XContentType.JSON) .execute(new ActionListener() { @Override @@ -158,7 +159,7 @@ public void testPendingRefreshWithIntervalChange() throws Exception { IndexService indexService = createIndex("test", builder.build()); assertFalse(indexService.getIndexSettings().isExplicitRefresh()); ensureGreen(); - client().prepareIndex("test", "test", "0").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("0").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); IndexShard shard = indexService.getShard(0); assertFalse(shard.scheduledRefresh()); assertTrue(shard.isSearchIdle()); @@ -166,7 +167,7 @@ public void testPendingRefreshWithIntervalChange() throws Exception { client().admin().indices().prepareRefresh().execute(ActionListener.wrap(refreshLatch::countDown));// async on purpose to make sure // it happens concurrently assertHitCount(client().prepareSearch().get(), 1); - client().prepareIndex("test", "test", "1").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); + 
client().prepareIndex("test").setId("1").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); assertFalse(shard.scheduledRefresh()); assertTrue(shard.hasRefreshPending()); @@ -185,7 +186,7 @@ public void testPendingRefreshWithIntervalChange() throws Exception { // We need to ensure a `scheduledRefresh` triggered by the internal refresh setting update is executed before we index a new doc; // otherwise, it will compete to call `Engine#maybeRefresh` with the `scheduledRefresh` that we are going to verify. ensureNoPendingScheduledRefresh(indexService.getThreadPool()); - client().prepareIndex("test", "test", "2").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("2").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); assertTrue(shard.scheduledRefresh()); assertFalse(shard.hasRefreshPending()); assertTrue(shard.isSearchIdle()); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java index ece7add2b6937..3a5e21fc8ef65 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java @@ -177,7 +177,7 @@ public void testCorruptFileAndRecover() throws ExecutionException, InterruptedEx disableAllocation("test"); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } indexRandom(true, builders); ensureGreen(); @@ -295,7 +295,7 @@ public void testCorruptPrimaryNoReplica() throws ExecutionException, Interrupted ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } indexRandom(true, builders); ensureGreen(); @@ -456,7 +456,7 @@ public void testCorruptionOnNetworkLayer() throws ExecutionException, Interrupte ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } indexRandom(true, builders); ensureGreen(); @@ -561,7 +561,7 @@ public void testCorruptFileThenSnapshotAndRestore() throws ExecutionException, I ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } indexRandom(true, builders); ensureGreen(); @@ -643,7 +643,7 @@ public void testReplicaCorruption() throws Exception { ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } indexRandom(true, builders); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java 
b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java index 5f39002ac6625..1dd0f6a3d664e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java @@ -88,7 +88,7 @@ public void testCorruptTranslogFiles() throws Exception { // Index some documents IndexRequestBuilder[] builders = new IndexRequestBuilder[scaledRandomIntBetween(100, 1000)]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("foo", "bar"); + builders[i] = client().prepareIndex("test").setSource("foo", "bar"); } indexRandom(false, false, false, Arrays.asList(builders)); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/ExceptionRetryIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/ExceptionRetryIT.java index 9fe0596357034..3e2091b2065e5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/store/ExceptionRetryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/store/ExceptionRetryIT.java @@ -127,7 +127,7 @@ public void testRetryDueToExceptionOnNetworkLayer() throws ExecutionException, I for (int i = 0; i < numDocs; i++) { XContentBuilder doc = null; doc = jsonBuilder().startObject().field("foo", "bar").endObject(); - bulkBuilder.add(client.prepareIndex("index", "type").setSource(doc)); + bulkBuilder.add(client.prepareIndex("index").setSource(doc)); } BulkResponse response = bulkBuilder.get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java b/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java index 3819b42e799ed..45fbb2651a96d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java @@ -72,7 +72,7 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { logger.info("indexing [{}] docs", numOfDocs); List builders = new ArrayList<>(numOfDocs); for (int j = 0; j < numOfDocs; j++) { - builders.add(client().prepareIndex("test", "type").setSource("field", "value_" + j)); + builders.add(client().prepareIndex("test").setSource("field", "value_" + j)); } indexRandom(true, builders); logger.info("verifying indexed content"); @@ -128,15 +128,15 @@ public void testCreatedFlag() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet(); + IndexResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet(); + indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_2").execute().actionGet(); assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); - client().prepareDelete("test", "type", "1").execute().actionGet(); + client().prepareDelete("test", "1").execute().actionGet(); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet(); + indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_2").execute().actionGet(); 
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } @@ -145,14 +145,14 @@ public void testCreatedFlagWithFlush() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet(); + IndexResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - client().prepareDelete("test", "type", "1").execute().actionGet(); + client().prepareDelete("test", "1").execute().actionGet(); flush(); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet(); + indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_2").execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } @@ -194,7 +194,8 @@ public void testCreatedFlagWithExternalVersioning() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(123) .setVersionType(VersionType.EXTERNAL) @@ -208,7 +209,7 @@ public void testCreateFlagWithBulk() { ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")) + .add(client().prepareIndex("test").setId("1").setSource("field1", "value1_1")) .execute() .actionGet(); assertThat(bulkResponse.hasFailures(), equalTo(false)); @@ -232,7 +233,7 @@ public void testCreateIndexWithLongName() { } try { - client().prepareIndex(randomAlphaOfLengthBetween(min, max).toLowerCase(Locale.ROOT), "mytype").setSource("foo", "bar").get(); + client().prepareIndex(randomAlphaOfLengthBetween(min, max).toLowerCase(Locale.ROOT)).setSource("foo", "bar").get(); fail("exception should have been thrown on too-long index name"); } catch (InvalidIndexNameException e) { assertThat( @@ -247,8 +248,7 @@ public void testCreateIndexWithLongName() { client().prepareIndex( randomAlphaOfLength(MetadataCreateIndexService.MAX_INDEX_NAME_BYTES - 1).toLowerCase(Locale.ROOT) + "Ϟ".toLowerCase( Locale.ROOT - ), - "mytype" + ) ).setSource("foo", "bar").get(); fail("exception should have been thrown on too-long index name"); } catch (InvalidIndexNameException e) { @@ -290,7 +290,7 @@ public void testInvalidIndexName() { public void testDocumentWithBlankFieldName() { MapperParsingException e = expectThrows( MapperParsingException.class, - () -> { client().prepareIndex("test", "type", "1").setSource("", "value1_2").execute().actionGet(); } + () -> { client().prepareIndex("test").setId("1").setSource("", "value1_2").execute().actionGet(); } ); assertThat(e.getMessage(), containsString("failed to parse")); assertThat(e.getRootCause().getMessage(), containsString("field name cannot be an empty string")); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java index 19e1e196daad0..7236c32697384 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java @@ -71,9 +71,9 
@@ public void testIndexNameDateMathExpressions() { String dateMathExp1 = "<.marvel-{now/d}>"; String dateMathExp2 = "<.marvel-{now/d-1d}>"; String dateMathExp3 = "<.marvel-{now/d-2d}>"; - client().prepareIndex(dateMathExp1, "type", "1").setSource("{}", XContentType.JSON).get(); - client().prepareIndex(dateMathExp2, "type", "2").setSource("{}", XContentType.JSON).get(); - client().prepareIndex(dateMathExp3, "type", "3").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp1).setId("1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp2).setId("2").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp3).setId("3").setSource("{}", XContentType.JSON).get(); refresh(); SearchResponse searchResponse = client().prepareSearch(dateMathExp1, dateMathExp2, dateMathExp3).get(); @@ -109,15 +109,15 @@ public void testIndexNameDateMathExpressions() { assertThat(indicesStatsResponse.getIndex(index2), notNullValue()); assertThat(indicesStatsResponse.getIndex(index3), notNullValue()); - DeleteResponse deleteResponse = client().prepareDelete(dateMathExp1, "type", "1").get(); + DeleteResponse deleteResponse = client().prepareDelete(dateMathExp1, "1").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getId(), equalTo("1")); - deleteResponse = client().prepareDelete(dateMathExp2, "type", "2").get(); + deleteResponse = client().prepareDelete(dateMathExp2, "2").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getId(), equalTo("2")); - deleteResponse = client().prepareDelete(dateMathExp3, "type", "3").get(); + deleteResponse = client().prepareDelete(dateMathExp3, "3").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getId(), equalTo("3")); } @@ -131,9 +131,9 @@ public void testAutoCreateIndexWithDateMathExpression() throws Exception { String dateMathExp1 = "<.marvel-{now/d}>"; String dateMathExp2 = "<.marvel-{now/d-1d}>"; String dateMathExp3 = "<.marvel-{now/d-2d}>"; - client().prepareIndex(dateMathExp1, "type", "1").setSource("{}", XContentType.JSON).get(); - client().prepareIndex(dateMathExp2, "type", "2").setSource("{}", XContentType.JSON).get(); - client().prepareIndex(dateMathExp3, "type", "3").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp1).setId("1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp2).setId("2").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp3).setId("3").setSource("{}", XContentType.JSON).get(); refresh(); SearchResponse searchResponse = client().prepareSearch(dateMathExp1, dateMathExp2, dateMathExp3).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndexingMemoryControllerIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndexingMemoryControllerIT.java index df96b3ee08fb3..63ea2ecbd428b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/IndexingMemoryControllerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndexingMemoryControllerIT.java @@ -122,14 +122,14 @@ public void testDeletesAloneCanTriggerRefresh() throws Exception { ); IndexShard shard = indexService.getShard(0); for (int i = 0; i < 100; i++) { - client().prepareIndex("index", "_doc").setId(Integer.toString(i)).setSource("field", "value").get(); + 
client().prepareIndex("index").setId(Integer.toString(i)).setSource("field", "value").get(); } // Force merge so we know all merges are done before we start deleting: ForceMergeResponse r = client().admin().indices().prepareForceMerge().setMaxNumSegments(1).execute().actionGet(); assertNoFailures(r); final RefreshStats refreshStats = shard.refreshStats(); for (int i = 0; i < 100; i++) { - client().prepareDelete("index", "_doc", Integer.toString(i)).get(); + client().prepareDelete("index", Integer.toString(i)).get(); } // need to assert busily as IndexingMemoryController refreshes in background assertBusy(() -> assertThat(shard.refreshStats().getTotal(), greaterThan(refreshStats.getTotal() + 1))); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesOptionsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesOptionsIntegrationIT.java index ef3a45e6755f7..3432cc967bf22 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesOptionsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesOptionsIntegrationIT.java @@ -361,7 +361,7 @@ public void testWildcardBehaviour() throws Exception { verify(getSettings(indices).setIndicesOptions(options), false); assertAcked(prepareCreate("foobar")); - client().prepareIndex("foobar", "type", "1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("foobar").setId("1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get(); // Verify defaults for wildcards, with one wildcard expression and one existing index indices = new String[] { "foo*" }; @@ -455,7 +455,7 @@ public void testWildcardBehaviourSnapshotRestore() throws Exception { public void testAllMissingLenient() throws Exception { createIndex("test1"); - client().prepareIndex("test1", "type", "1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test1").setId("1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get(); SearchResponse response = client().prepareSearch("test2") .setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setQuery(matchAllQuery()) @@ -595,34 +595,34 @@ public void testPutAliasWildcard() throws Exception { } public void testPutMapping() throws Exception { - verify(client().admin().indices().preparePutMapping("foo").setType("type1").setSource("field", "type=text"), true); - verify(client().admin().indices().preparePutMapping("_all").setType("type1").setSource("field", "type=text"), true); + verify(client().admin().indices().preparePutMapping("foo").setSource("field", "type=text"), true); + verify(client().admin().indices().preparePutMapping("_all").setSource("field", "type=text"), true); for (String index : Arrays.asList("foo", "foobar", "bar", "barbaz")) { assertAcked(prepareCreate(index)); } - verify(client().admin().indices().preparePutMapping("foo").setType("type").setSource("field", "type=text"), false); - assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type"), notNullValue()); - verify(client().admin().indices().preparePutMapping("b*").setType("type").setSource("field", "type=text"), false); - assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue()); - verify(client().admin().indices().preparePutMapping("_all").setType("type").setSource("field", "type=text"), 
false); - assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue()); - verify(client().admin().indices().preparePutMapping().setType("type").setSource("field", "type=text"), false); - assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue()); - - verify(client().admin().indices().preparePutMapping("c*").setType("type").setSource("field", "type=text"), true); + verify(client().admin().indices().preparePutMapping("foo").setSource("field", "type=text"), false); + assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo"), notNullValue()); + verify(client().admin().indices().preparePutMapping("b*").setSource("field", "type=text"), false); + assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz"), notNullValue()); + verify(client().admin().indices().preparePutMapping("_all").setSource("field", "type=text"), false); + assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz"), notNullValue()); + verify(client().admin().indices().preparePutMapping().setSource("field", "type=text"), false); + assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz"), notNullValue()); + + verify(client().admin().indices().preparePutMapping("c*").setSource("field", "type=text"), true); assertAcked(client().admin().indices().prepareClose("barbaz").get()); - verify(client().admin().indices().preparePutMapping("barbaz").setType("type").setSource("field", "type=text"), false); - assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue()); + verify(client().admin().indices().preparePutMapping("barbaz").setSource("field", "type=text"), false); + assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz"), 
notNullValue()); } public static final class TestPlugin extends Plugin { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java index 1724ec6beafdf..18940cba80799 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java @@ -79,8 +79,8 @@ public void testCacheAggs() throws Exception { ); indexRandom( true, - client.prepareIndex("index", "type").setSource("f", "2014-03-10T00:00:00.000Z"), - client.prepareIndex("index", "type").setSource("f", "2014-05-13T00:00:00.000Z") + client.prepareIndex("index").setSource("f", "2014-03-10T00:00:00.000Z"), + client.prepareIndex("index").setSource("f", "2014-05-13T00:00:00.000Z") ); ensureSearchable("index"); @@ -149,15 +149,15 @@ public void testQueryRewrite() throws Exception { ); indexRandom( true, - client.prepareIndex("index", "type", "1").setRouting("1").setSource("s", "2016-03-19"), - client.prepareIndex("index", "type", "2").setRouting("1").setSource("s", "2016-03-20"), - client.prepareIndex("index", "type", "3").setRouting("1").setSource("s", "2016-03-21"), - client.prepareIndex("index", "type", "4").setRouting("2").setSource("s", "2016-03-22"), - client.prepareIndex("index", "type", "5").setRouting("2").setSource("s", "2016-03-23"), - client.prepareIndex("index", "type", "6").setRouting("2").setSource("s", "2016-03-24"), - client.prepareIndex("index", "type", "7").setRouting("3").setSource("s", "2016-03-25"), - client.prepareIndex("index", "type", "8").setRouting("3").setSource("s", "2016-03-26"), - client.prepareIndex("index", "type", "9").setRouting("3").setSource("s", "2016-03-27") + client.prepareIndex("index").setId("1").setRouting("1").setSource("s", "2016-03-19"), + client.prepareIndex("index").setId("2").setRouting("1").setSource("s", "2016-03-20"), + client.prepareIndex("index").setId("3").setRouting("1").setSource("s", "2016-03-21"), + client.prepareIndex("index").setId("4").setRouting("2").setSource("s", "2016-03-22"), + client.prepareIndex("index").setId("5").setRouting("2").setSource("s", "2016-03-23"), + client.prepareIndex("index").setId("6").setRouting("2").setSource("s", "2016-03-24"), + client.prepareIndex("index").setId("7").setRouting("3").setSource("s", "2016-03-25"), + client.prepareIndex("index").setId("8").setRouting("3").setSource("s", "2016-03-26"), + client.prepareIndex("index").setId("9").setRouting("3").setSource("s", "2016-03-27") ); ensureSearchable("index"); assertCacheState(client, "index", 0, 0); @@ -219,15 +219,15 @@ public void testQueryRewriteMissingValues() throws Exception { ); indexRandom( true, - client.prepareIndex("index", "type", "1").setSource("s", "2016-03-19"), - client.prepareIndex("index", "type", "2").setSource("s", "2016-03-20"), - client.prepareIndex("index", "type", "3").setSource("s", "2016-03-21"), - client.prepareIndex("index", "type", "4").setSource("s", "2016-03-22"), - client.prepareIndex("index", "type", "5").setSource("s", "2016-03-23"), - client.prepareIndex("index", "type", "6").setSource("s", "2016-03-24"), - client.prepareIndex("index", "type", "7").setSource("other", "value"), - client.prepareIndex("index", "type", "8").setSource("s", "2016-03-26"), - client.prepareIndex("index", "type", "9").setSource("s", "2016-03-27") + client.prepareIndex("index").setId("1").setSource("s", "2016-03-19"), + 
client.prepareIndex("index").setId("2").setSource("s", "2016-03-20"), + client.prepareIndex("index").setId("3").setSource("s", "2016-03-21"), + client.prepareIndex("index").setId("4").setSource("s", "2016-03-22"), + client.prepareIndex("index").setId("5").setSource("s", "2016-03-23"), + client.prepareIndex("index").setId("6").setSource("s", "2016-03-24"), + client.prepareIndex("index").setId("7").setSource("other", "value"), + client.prepareIndex("index").setId("8").setSource("s", "2016-03-26"), + client.prepareIndex("index").setId("9").setSource("s", "2016-03-27") ); ensureSearchable("index"); assertCacheState(client, "index", 0, 0); @@ -285,15 +285,15 @@ public void testQueryRewriteDates() throws Exception { ); indexRandom( true, - client.prepareIndex("index", "type", "1").setSource("d", "2014-01-01T00:00:00"), - client.prepareIndex("index", "type", "2").setSource("d", "2014-02-01T00:00:00"), - client.prepareIndex("index", "type", "3").setSource("d", "2014-03-01T00:00:00"), - client.prepareIndex("index", "type", "4").setSource("d", "2014-04-01T00:00:00"), - client.prepareIndex("index", "type", "5").setSource("d", "2014-05-01T00:00:00"), - client.prepareIndex("index", "type", "6").setSource("d", "2014-06-01T00:00:00"), - client.prepareIndex("index", "type", "7").setSource("d", "2014-07-01T00:00:00"), - client.prepareIndex("index", "type", "8").setSource("d", "2014-08-01T00:00:00"), - client.prepareIndex("index", "type", "9").setSource("d", "2014-09-01T00:00:00") + client.prepareIndex("index").setId("1").setSource("d", "2014-01-01T00:00:00"), + client.prepareIndex("index").setId("2").setSource("d", "2014-02-01T00:00:00"), + client.prepareIndex("index").setId("3").setSource("d", "2014-03-01T00:00:00"), + client.prepareIndex("index").setId("4").setSource("d", "2014-04-01T00:00:00"), + client.prepareIndex("index").setId("5").setSource("d", "2014-05-01T00:00:00"), + client.prepareIndex("index").setId("6").setSource("d", "2014-06-01T00:00:00"), + client.prepareIndex("index").setId("7").setSource("d", "2014-07-01T00:00:00"), + client.prepareIndex("index").setId("8").setSource("d", "2014-08-01T00:00:00"), + client.prepareIndex("index").setId("9").setSource("d", "2014-09-01T00:00:00") ); ensureSearchable("index"); assertCacheState(client, "index", 0, 0); @@ -352,15 +352,15 @@ public void testQueryRewriteDatesWithNow() throws Exception { DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time"); indexRandom( true, - client.prepareIndex("index-1", "type", "1").setSource("d", formatter.format(now)), - client.prepareIndex("index-1", "type", "2").setSource("d", formatter.format(now.minusDays(1))), - client.prepareIndex("index-1", "type", "3").setSource("d", formatter.format(now.minusDays(2))), - client.prepareIndex("index-2", "type", "4").setSource("d", formatter.format(now.minusDays(3))), - client.prepareIndex("index-2", "type", "5").setSource("d", formatter.format(now.minusDays(4))), - client.prepareIndex("index-2", "type", "6").setSource("d", formatter.format(now.minusDays(5))), - client.prepareIndex("index-3", "type", "7").setSource("d", formatter.format(now.minusDays(6))), - client.prepareIndex("index-3", "type", "8").setSource("d", formatter.format(now.minusDays(7))), - client.prepareIndex("index-3", "type", "9").setSource("d", formatter.format(now.minusDays(8))) + client.prepareIndex("index-1").setId("1").setSource("d", formatter.format(now)), + client.prepareIndex("index-1").setId("2").setSource("d", formatter.format(now.minusDays(1))), + 
client.prepareIndex("index-1").setId("3").setSource("d", formatter.format(now.minusDays(2))), + client.prepareIndex("index-2").setId("4").setSource("d", formatter.format(now.minusDays(3))), + client.prepareIndex("index-2").setId("5").setSource("d", formatter.format(now.minusDays(4))), + client.prepareIndex("index-2").setId("6").setSource("d", formatter.format(now.minusDays(5))), + client.prepareIndex("index-3").setId("7").setSource("d", formatter.format(now.minusDays(6))), + client.prepareIndex("index-3").setId("8").setSource("d", formatter.format(now.minusDays(7))), + client.prepareIndex("index-3").setId("9").setSource("d", formatter.format(now.minusDays(8))) ); ensureSearchable("index-1", "index-2", "index-3"); assertCacheState(client, "index-1", 0, 0); @@ -429,15 +429,15 @@ public void testCanCache() throws Exception { assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "s", "type=date").setSettings(settings).get()); indexRandom( true, - client.prepareIndex("index", "type", "1").setRouting("1").setSource("s", "2016-03-19"), - client.prepareIndex("index", "type", "2").setRouting("1").setSource("s", "2016-03-20"), - client.prepareIndex("index", "type", "3").setRouting("1").setSource("s", "2016-03-21"), - client.prepareIndex("index", "type", "4").setRouting("2").setSource("s", "2016-03-22"), - client.prepareIndex("index", "type", "5").setRouting("2").setSource("s", "2016-03-23"), - client.prepareIndex("index", "type", "6").setRouting("2").setSource("s", "2016-03-24"), - client.prepareIndex("index", "type", "7").setRouting("3").setSource("s", "2016-03-25"), - client.prepareIndex("index", "type", "8").setRouting("3").setSource("s", "2016-03-26"), - client.prepareIndex("index", "type", "9").setRouting("3").setSource("s", "2016-03-27") + client.prepareIndex("index").setId("1").setRouting("1").setSource("s", "2016-03-19"), + client.prepareIndex("index").setId("2").setRouting("1").setSource("s", "2016-03-20"), + client.prepareIndex("index").setId("3").setRouting("1").setSource("s", "2016-03-21"), + client.prepareIndex("index").setId("4").setRouting("2").setSource("s", "2016-03-22"), + client.prepareIndex("index").setId("5").setRouting("2").setSource("s", "2016-03-23"), + client.prepareIndex("index").setId("6").setRouting("2").setSource("s", "2016-03-24"), + client.prepareIndex("index").setId("7").setRouting("3").setSource("s", "2016-03-25"), + client.prepareIndex("index").setId("8").setRouting("3").setSource("s", "2016-03-26"), + client.prepareIndex("index").setId("9").setRouting("3").setSource("s", "2016-03-27") ); ensureSearchable("index"); assertCacheState(client, "index", 0, 0); @@ -535,10 +535,7 @@ public void testCacheWithFilteredAlias() { .get() ); ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); - client.prepareIndex("index", "type", "1") - .setRouting("1") - .setSource("created_at", DateTimeFormatter.ISO_LOCAL_DATE.format(now)) - .get(); + client.prepareIndex("index").setId("1").setRouting("1").setSource("created_at", DateTimeFormatter.ISO_LOCAL_DATE.format(now)).get(); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache ForceMergeResponse forceMergeResponse = client.admin().indices().prepareForceMerge("index").setFlush(true).get(); OpenSearchAssertions.assertAllSuccessful(forceMergeResponse); @@ -590,7 +587,7 @@ public void testProfileDisableCache() throws Exception { ) .get() ); - indexRandom(true, client.prepareIndex("index", "_doc").setSource("k", "hello")); + 
indexRandom(true, client.prepareIndex("index").setSource("k", "hello")); ensureSearchable("index"); int expectedHits = 0; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java index 8c34656c34e99..7218495898677 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java @@ -155,7 +155,7 @@ public void testAnalyzerWithFieldOrTypeTests() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); ensureGreen(); - client().admin().indices().preparePutMapping("test").setType("document").setSource("simple", "type=text,analyzer=simple").get(); + client().admin().indices().preparePutMapping("test").setSource("simple", "type=text,analyzer=simple").get(); for (int i = 0; i < 10; i++) { final AnalyzeRequestBuilder requestBuilder = client().admin().indices().prepareAnalyze("THIS IS A TEST"); @@ -201,7 +201,6 @@ public void testAnalyzerWithMultiValues() throws Exception { client().admin() .indices() .preparePutMapping("test") - .setType("document") .setSource("simple", "type=text,analyzer=simple,position_increment_gap=100") .get(); @@ -304,7 +303,6 @@ public void testDetailAnalyzeWithMultiValues() throws Exception { client().admin() .indices() .preparePutMapping("test") - .setType("document") .setSource("simple", "type=text,analyzer=simple,position_increment_gap=100") .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java index ef7fff331f0c2..7dc1933575ea3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java @@ -80,7 +80,8 @@ public void testConcurrentDynamicMapping() throws Exception { for (int j = 0; j < numDocs; j++) { Map source = new HashMap<>(); source.put(fieldName, "test-user"); - client().prepareIndex("test", mappingType, Integer.toString(currentID++)) + client().prepareIndex("test") + .setId(Integer.toString(currentID++)) .setSource(source) .execute(new ActionListener() { @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java index 52e2fe303c377..da0f88276f2fa 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -159,8 +159,8 @@ public void testGetFieldMappings() throws Exception { @SuppressWarnings("unchecked") public void testSimpleGetFieldMappingsWithDefaults() throws Exception { assertAcked(prepareCreate("test").addMapping("type", getMappingForType("type"))); - client().admin().indices().preparePutMapping("test").setType("type").setSource("num", "type=long").get(); - client().admin().indices().preparePutMapping("test").setType("type").setSource("field2", "type=text,index=false").get(); + client().admin().indices().preparePutMapping("test").setSource("num", "type=long").get(); + client().admin().indices().preparePutMapping("test").setSource("field2", 
"type=text,index=false").get(); GetFieldMappingsResponse response = client().admin() .indices() diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java index cfd4a830e5c37..aac12522afa2f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java @@ -34,6 +34,7 @@ import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.Priority; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.plugins.Plugin; @@ -66,7 +67,7 @@ public void testGetMappingsWhereThereAreNone() { createIndex("index"); GetMappingsResponse response = client().admin().indices().prepareGetMappings().execute().actionGet(); assertThat(response.mappings().containsKey("index"), equalTo(true)); - assertThat(response.mappings().get("index").size(), equalTo(0)); + assertEquals(MappingMetadata.EMPTY_MAPPINGS, response.mappings().get("index")); } private XContentBuilder getMappingForType(String type) throws IOException { @@ -97,50 +98,19 @@ public void testSimpleGetMappings() throws Exception { // Get all mappings GetMappingsResponse response = client().admin().indices().prepareGetMappings().execute().actionGet(); assertThat(response.mappings().size(), equalTo(2)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - assertThat(response.mappings().get("indexb").size(), equalTo(1)); - assertThat(response.mappings().get("indexb").get("typeA"), notNullValue()); + assertThat(response.mappings().get("indexa"), notNullValue()); + assertThat(response.mappings().get("indexb"), notNullValue()); // Get all mappings, via wildcard support - response = client().admin().indices().prepareGetMappings("*").setTypes("*").execute().actionGet(); + response = client().admin().indices().prepareGetMappings("*").execute().actionGet(); assertThat(response.mappings().size(), equalTo(2)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - assertThat(response.mappings().get("indexb").size(), equalTo(1)); - assertThat(response.mappings().get("indexb").get("typeA"), notNullValue()); + assertThat(response.mappings().get("indexa"), notNullValue()); + assertThat(response.mappings().get("indexb"), notNullValue()); - // Get all typeA mappings in all indices - response = client().admin().indices().prepareGetMappings("*").setTypes("typeA").execute().actionGet(); - assertThat(response.mappings().size(), equalTo(2)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - assertThat(response.mappings().get("indexb").size(), equalTo(1)); - assertThat(response.mappings().get("indexb").get("typeA"), notNullValue()); - - // Get all mappings in indexa + // Get mappings in indexa response = client().admin().indices().prepareGetMappings("indexa").execute().actionGet(); assertThat(response.mappings().size(), equalTo(1)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - 
assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - - // Get all mappings beginning with A* in indexa - response = client().admin().indices().prepareGetMappings("indexa").setTypes("*A").execute().actionGet(); - assertThat(response.mappings().size(), equalTo(1)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - - // Get all mappings beginning with B* in all indices - response = client().admin().indices().prepareGetMappings().setTypes("B*").execute().actionGet(); - assertThat(response.mappings().size(), equalTo(0)); - - // Get all mappings beginning with B* and A* in all indices - response = client().admin().indices().prepareGetMappings().setTypes("B*", "*A").execute().actionGet(); - assertThat(response.mappings().size(), equalTo(2)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - assertThat(response.mappings().get("indexb").size(), equalTo(1)); - assertThat(response.mappings().get("indexb").get("typeA"), notNullValue()); + assertThat(response.mappings().get("indexa"), notNullValue()); } public void testGetMappingsWithBlocks() throws IOException { @@ -152,7 +122,7 @@ public void testGetMappingsWithBlocks() throws IOException { enableIndexBlock("test", block); GetMappingsResponse response = client().admin().indices().prepareGetMappings().execute().actionGet(); assertThat(response.mappings().size(), equalTo(1)); - assertThat(response.mappings().get("test").size(), equalTo(1)); + assertNotNull(response.mappings().get("test")); } finally { disableIndexBlock("test", block); } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java index 0afe067afb686..32584a9e33b52 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -41,7 +41,6 @@ import org.opensearch.cluster.action.index.MappingUpdatedAction; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.Priority; -import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentType; @@ -112,7 +111,8 @@ public void testDynamicUpdates() throws Exception { String type = "type"; String fieldName = "field_" + type + "_" + rec; indexRequests.add( - client().prepareIndex("test", type, Integer.toString(rec)) + client().prepareIndex("test") + .setId(Integer.toString(rec)) .setTimeout(TimeValue.timeValueMinutes(5)) .setSource(fieldName, "some_value") ); @@ -153,7 +153,6 @@ public void testUpdateMappingWithoutType() { AcknowledgedResponse putMappingResponse = client().admin() .indices() .preparePutMapping("test") - .setType("_doc") .setSource("{\"properties\":{\"date\":{\"type\":\"integer\"}}}", XContentType.JSON) .execute() .actionGet(); @@ -162,7 +161,7 @@ public void testUpdateMappingWithoutType() { GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").execute().actionGet(); assertThat( - getMappingsResponse.mappings().get("test").get("_doc").source().toString(), + 
getMappingsResponse.mappings().get("test").source().toString(), equalTo("{\"_doc\":{\"properties\":{\"body\":{\"type\":\"text\"},\"date\":{\"type\":\"integer\"}}}}") ); } @@ -179,7 +178,6 @@ public void testUpdateMappingWithoutTypeMultiObjects() { AcknowledgedResponse putMappingResponse = client().admin() .indices() .preparePutMapping("test") - .setType("_doc") .setSource("{\"properties\":{\"date\":{\"type\":\"integer\"}}}", XContentType.JSON) .execute() .actionGet(); @@ -188,7 +186,7 @@ public void testUpdateMappingWithoutTypeMultiObjects() { GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").execute().actionGet(); assertThat( - getMappingsResponse.mappings().get("test").get("_doc").source().toString(), + getMappingsResponse.mappings().get("test").source().toString(), equalTo("{\"_doc\":{\"properties\":{\"date\":{\"type\":\"integer\"}}}}") ); } @@ -207,7 +205,6 @@ public void testUpdateMappingWithConflicts() { client().admin() .indices() .preparePutMapping("test") - .setType("type") .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}", XContentType.JSON) .execute() .actionGet(); @@ -228,7 +225,6 @@ public void testUpdateMappingWithNormsConflicts() { client().admin() .indices() .preparePutMapping("test") - .setType("type") .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": true }}}}", XContentType.JSON) .execute() .actionGet(); @@ -254,7 +250,6 @@ public void testUpdateMappingNoChanges() { AcknowledgedResponse putMappingResponse = client().admin() .indices() .preparePutMapping("test") - .setType("type") .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON) .execute() .actionGet(); @@ -288,17 +283,15 @@ public void testUpdateMappingConcurrently() throws Throwable { Client client1 = clientArray.get(i % clientArray.size()); Client client2 = clientArray.get((i + 1) % clientArray.size()); String indexName = i % 2 == 0 ? 
"test2" : "test1"; - String typeName = "type"; String fieldName = Thread.currentThread().getName() + "_" + i; AcknowledgedResponse response = client1.admin() .indices() .preparePutMapping(indexName) - .setType(typeName) .setSource( JsonXContent.contentBuilder() .startObject() - .startObject(typeName) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(fieldName) .field("type", "text") @@ -312,10 +305,9 @@ public void testUpdateMappingConcurrently() throws Throwable { assertThat(response.isAcknowledged(), equalTo(true)); GetMappingsResponse getMappingResponse = client2.admin().indices().prepareGetMappings(indexName).get(); - ImmutableOpenMap mappings = getMappingResponse.getMappings().get(indexName); - assertThat(mappings.containsKey(typeName), equalTo(true)); + MappingMetadata mappings = getMappingResponse.getMappings().get(indexName); assertThat( - ((Map) mappings.get(typeName).getSourceAsMap().get("properties")).keySet(), + ((Map) mappings.getSourceAsMap().get("properties")).keySet(), Matchers.hasItem(fieldName) ); } @@ -349,7 +341,6 @@ public void testPutMappingsWithBlocks() { client().admin() .indices() .preparePutMapping("test") - .setType("_doc") .setSource("{\"properties\":{\"date\":{\"type\":\"integer\"}}}", XContentType.JSON) ); } finally { @@ -364,7 +355,6 @@ public void testPutMappingsWithBlocks() { client().admin() .indices() .preparePutMapping("test") - .setType("_doc") .setSource("{\"properties\":{\"date\":{\"type\":\"integer\"}}}", XContentType.JSON) ); } finally { @@ -398,12 +388,10 @@ private void assertConcreteMappingsOnAll(final String index, final String... fie */ private void assertMappingOnMaster(final String index, final String... fieldNames) { GetMappingsResponse response = client().admin().indices().prepareGetMappings(index).get(); - ImmutableOpenMap mappings = response.getMappings().get(index); + MappingMetadata mappings = response.getMappings().get(index); assertThat(mappings, notNullValue()); - MappingMetadata mappingMetadata = mappings.get(MapperService.SINGLE_MAPPING_NAME); - assertThat(mappingMetadata, notNullValue()); + Map mappingSource = mappings.getSourceAsMap(); - Map mappingSource = mappingMetadata.getSourceAsMap(); assertFalse(mappingSource.isEmpty()); assertTrue(mappingSource.containsKey("properties")); @@ -413,7 +401,7 @@ private void assertMappingOnMaster(final String index, final String... 
fieldName fieldName = fieldName.replace(".", ".properties."); } assertThat( - "field " + fieldName + " doesn't exists in mapping " + mappingMetadata.source().string(), + "field " + fieldName + " doesn't exists in mapping " + mappings.source().string(), XContentMapValues.extractValue(fieldName, mappingProperties), notNullValue() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerNoopIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerNoopIT.java index a409475da3cd7..1f79a52284c61 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerNoopIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerNoopIT.java @@ -70,7 +70,7 @@ public void testNoopRequestBreaker() throws Exception { int docCount = scaledRandomIntBetween(300, 1000); List reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { - reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); + reqs.add(client.prepareIndex("cb-test").setId(Long.toString(id)).setSource("test", id)); } indexRandom(true, reqs); @@ -87,7 +87,7 @@ public void testNoopFielddataBreaker() throws Exception { int docCount = scaledRandomIntBetween(300, 1000); List reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { - reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); + reqs.add(client.prepareIndex("cb-test").setId(Long.toString(id)).setSource("test", id)); } indexRandom(true, reqs); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java index 0772bc2965c4c..e9bb9f5a90477 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -154,7 +154,7 @@ public void testMemoryBreaker() throws Exception { int docCount = scaledRandomIntBetween(300, 1000); List reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { - reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", "value" + id)); + reqs.add(client.prepareIndex("cb-test").setId(Long.toString(id)).setSource("test", "value" + id)); } indexRandom(true, false, true, reqs); @@ -208,7 +208,7 @@ public void testRamAccountingTermsEnum() throws Exception { int docCount = scaledRandomIntBetween(300, 1000); List reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { - reqs.add(client.prepareIndex("ramtest", "type", Long.toString(id)).setSource("test", "value" + id)); + reqs.add(client.prepareIndex("ramtest").setId(Long.toString(id)).setSource("test", "value" + id)); } indexRandom(true, false, true, reqs); @@ -261,7 +261,7 @@ public void testRequestBreaker() throws Exception { int docCount = scaledRandomIntBetween(300, 1000); List reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { - reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); + reqs.add(client.prepareIndex("cb-test").setId(Long.toString(id)).setSource("test", id)); } indexRandom(true, reqs); @@ -295,7 +295,7 @@ public void testBucketBreaker() throws Exception { int docCount = scaledRandomIntBetween(100, 1000); List reqs = new ArrayList<>(); for (long id = 
0; id < docCount; id++) { - reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); + reqs.add(client.prepareIndex("cb-test").setId(Long.toString(id)).setSource("test", id)); } indexRandom(true, reqs); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index eb36e1c155ecc..3d907bcaf3198 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -169,7 +169,8 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc } for (int i = 0; i < numDocs; i++) { try { - client().prepareIndex("test", "type", "" + i) + client().prepareIndex("test") + .setId("" + i) .setTimeout(TimeValue.timeValueSeconds(1)) .setSource("test-str", randomUnicodeOfLengthBetween(5, 25), "test-num", i) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java index 6d62b7969bc48..7fd2466647272 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -71,11 +71,11 @@ public void testPrimaryRelocationWhileIndexing() throws Exception { @Override public void run() { while (finished.get() == false && numAutoGenDocs.get() < 10_000) { - IndexResponse indexResponse = client().prepareIndex("test", "type", "id").setSource("field", "value").get(); + IndexResponse indexResponse = client().prepareIndex("test").setId("id").setSource("field", "value").get(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - DeleteResponse deleteResponse = client().prepareDelete("test", "type", "id").get(); + DeleteResponse deleteResponse = client().prepareDelete("test", "id").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); - client().prepareIndex("test", "type").setSource("auto", true).get(); + client().prepareIndex("test").setSource("auto", true).get(); numAutoGenDocs.incrementAndGet(); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java index 3bab909d3b7f3..a7dc77e024d5c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java @@ -33,8 +33,8 @@ package org.opensearch.indices.recovery; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.util.SetOnce; - import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; @@ -75,6 +75,7 @@ import org.opensearch.common.Strings; import org.opensearch.common.breaker.CircuitBreaker; import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; import 
org.opensearch.common.unit.ByteSizeUnit; import org.opensearch.common.unit.ByteSizeValue; @@ -88,7 +89,6 @@ import org.opensearch.index.MockEngineFactoryPlugin; import org.opensearch.index.analysis.AbstractTokenFilterFactory; import org.opensearch.index.analysis.TokenFilterFactory; -import org.opensearch.index.engine.Engine; import org.opensearch.index.mapper.MapperParsingException; import org.opensearch.index.recovery.RecoveryStats; import org.opensearch.index.seqno.ReplicationTracker; @@ -114,11 +114,11 @@ import org.opensearch.snapshots.SnapshotState; import org.opensearch.tasks.Task; import org.opensearch.test.BackgroundIndexer; +import org.opensearch.test.InternalSettingsPlugin; +import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; -import org.opensearch.test.InternalSettingsPlugin; -import org.opensearch.test.InternalTestCluster; import org.opensearch.test.engine.MockEngineSupport; import org.opensearch.test.store.MockFSIndexStore; import org.opensearch.test.transport.MockTransportService; @@ -151,12 +151,6 @@ import static java.util.Collections.singletonMap; import static java.util.stream.Collectors.toList; -import static org.opensearch.action.DocWriteResponse.Result.CREATED; -import static org.opensearch.action.DocWriteResponse.Result.UPDATED; -import static org.opensearch.node.RecoverySettingsChunkSizePlugin.CHUNK_SIZE_SETTING; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; - import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; @@ -167,12 +161,16 @@ import static org.hamcrest.Matchers.isOneOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; +import static org.opensearch.action.DocWriteResponse.Result.CREATED; +import static org.opensearch.action.DocWriteResponse.Result.UPDATED; +import static org.opensearch.node.RecoverySettingsChunkSizePlugin.CHUNK_SIZE_SETTING; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class IndexRecoveryIT extends OpenSearchIntegTestCase { private static final String INDEX_NAME = "test-idx-1"; - private static final String INDEX_TYPE = "test-type-1"; private static final String REPO_NAME = "test-repo-1"; private static final String SNAP_NAME = "test-snap-1"; @@ -414,7 +412,7 @@ public void testCancelNewShardRecoveryAndUsesExistingShardCopy() throws Exceptio int numDocs = randomIntBetween(10, 200); final IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex(INDEX_NAME, INDEX_TYPE) + docs[i] = client().prepareIndex(INDEX_NAME) .setSource("foo-int", randomInt(), "foo-string", randomAlphaOfLength(32), "foo-float", randomFloat()); } indexRandom(randomBoolean(), docs); @@ -828,7 +826,7 @@ private IndicesStatsResponse createAndPopulateIndex(String name, int nodeCount, final IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex(name, INDEX_TYPE) + docs[i] = client().prepareIndex(name) .setSource("foo-int", randomInt(), "foo-string", 
randomAlphaOfLength(32), "foo-float", randomFloat()); } @@ -881,14 +879,14 @@ public void testTransientErrorsDuringRecoveryAreRetried() throws Exception { // is a mix of file chunks and translog ops int threeFourths = (int) (numDocs * 0.75); for (int i = 0; i < threeFourths; i++) { - requests.add(client().prepareIndex(indexName, "type").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); flush(indexName); requests.clear(); for (int i = threeFourths; i < numDocs; i++) { - requests.add(client().prepareIndex(indexName, "type").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); ensureSearchable(indexName); @@ -1080,7 +1078,7 @@ public void testDisconnectsWhileRecovering() throws Exception { List requests = new ArrayList<>(); int numDocs = scaledRandomIntBetween(25, 250); for (int i = 0; i < numDocs; i++) { - requests.add(client().prepareIndex(indexName, "type").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); ensureSearchable(indexName); @@ -1234,7 +1232,7 @@ public void testDisconnectsDuringRecovery() throws Exception { List requests = new ArrayList<>(); int numDocs = scaledRandomIntBetween(25, 250); for (int i = 0; i < numDocs; i++) { - requests.add(client().prepareIndex(indexName, "type").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); ensureSearchable(indexName); @@ -1377,7 +1375,7 @@ public void testHistoryRetention() throws Exception { final List requests = new ArrayList<>(); final int replicatedDocCount = scaledRandomIntBetween(25, 250); while (requests.size() < replicatedDocCount) { - requests.add(client().prepareIndex(indexName, "_doc").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); if (randomBoolean()) { @@ -1399,7 +1397,7 @@ public void testHistoryRetention() throws Exception { final int numNewDocs = scaledRandomIntBetween(25, 250); for (int i = 0; i < numNewDocs; i++) { - client().prepareIndex(indexName, "_doc").setSource("{}", XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex(indexName).setSource("{}", XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); } // Flush twice to update the safe commit's local checkpoint assertThat(client().admin().indices().prepareFlush(indexName).setForce(true).execute().get().getFailedShards(), equalTo(0)); @@ -1435,15 +1433,11 @@ public void testDoNotInfinitelyWaitForMapping() { .put("index.number_of_shards", 1) .build() ); - client().admin() - .indices() - .preparePutMapping("test") - .setType("_doc") - .setSource("test_field", "type=text,analyzer=test_analyzer") - .get(); + client().admin().indices().preparePutMapping("test").setSource("test_field", "type=text,analyzer=test_analyzer").get(); int numDocs = between(1, 10); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "_doc", "u" + i) + client().prepareIndex("test") + .setId("u" + i) .setSource(singletonMap("test_field", Integer.toString(i)), XContentType.JSON) .get(); } @@ -1562,7 +1556,7 @@ public void testRecoverLocallyUpToGlobalCheckpoint() throws Exception { randomBoolean(), false, randomBoolean(), - 
IntStream.range(0, numDocs).mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)).collect(toList()) + IntStream.range(0, numDocs).mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); client().admin().indices().prepareRefresh(indexName).get(); // avoid refresh when we are failing a shard String failingNode = randomFrom(nodes); @@ -1604,9 +1598,9 @@ public void testRecoverLocallyUpToGlobalCheckpoint() throws Exception { .getShardOrNull(new ShardId(resolveIndex(indexName), 0)); final long lastSyncedGlobalCheckpoint = shard.getLastSyncedGlobalCheckpoint(); final long localCheckpointOfSafeCommit; - try (Engine.IndexCommitRef safeCommitRef = shard.acquireSafeIndexCommit()) { + try (GatedCloseable wrappedSafeCommit = shard.acquireSafeIndexCommit()) { localCheckpointOfSafeCommit = SequenceNumbers.loadSeqNoInfoFromLuceneCommit( - safeCommitRef.getIndexCommit().getUserData().entrySet() + wrappedSafeCommit.get().getUserData().entrySet() ).localCheckpoint; } final long maxSeqNo = shard.seqNoStats().getMaxSeqNo(); @@ -1658,9 +1652,7 @@ public void testUsesFileBasedRecoveryIfRetentionLeaseMissing() throws Exception randomBoolean(), randomBoolean(), randomBoolean(), - IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) - .collect(toList()) + IntStream.range(0, between(0, 100)).mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); ensureGreen(indexName); @@ -1731,9 +1723,7 @@ public void testUsesFileBasedRecoveryIfRetentionLeaseAheadOfGlobalCheckpoint() t randomBoolean(), randomBoolean(), randomBoolean(), - IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) - .collect(toList()) + IntStream.range(0, between(0, 100)).mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); ensureGreen(indexName); @@ -1767,7 +1757,7 @@ public Settings onNodeStopped(String nodeName) throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(1, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); @@ -1826,9 +1816,7 @@ public void testUsesFileBasedRecoveryIfOperationsBasedRecoveryWouldBeUnreasonabl randomBoolean(), false, randomBoolean(), - IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) - .collect(toList()) + IntStream.range(0, between(0, 100)).mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); ensureGreen(indexName); @@ -1912,7 +1900,7 @@ public Settings onNodeStopped(String nodeName) throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, newDocCount) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); @@ -1964,9 +1952,7 @@ public void testDoesNotCopyOperationsInSafeCommit() throws Exception { randomBoolean(), randomBoolean(), randomBoolean(), - IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) - .collect(toList()) + IntStream.range(0, between(0, 100)).mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); final ShardId shardId = new ShardId(resolveIndex(indexName), 0); @@ -1985,9 +1971,7 @@ public 
void testDoesNotCopyOperationsInSafeCommit() throws Exception { randomBoolean(), randomBoolean(), randomBoolean(), - IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) - .collect(toList()) + IntStream.range(0, between(0, 100)).mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); assertAcked( @@ -2056,7 +2040,7 @@ public void testRepeatedRecovery() throws Exception { false, randomBoolean(), IntStream.range(0, randomIntBetween(0, 10)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); @@ -2088,7 +2072,7 @@ public void testRepeatedRecovery() throws Exception { false, randomBoolean(), IntStream.range(0, randomIntBetween(0, 10)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); @@ -2117,7 +2101,7 @@ public void testAllocateEmptyPrimaryResetsGlobalCheckpoint() throws Exception { .get() ); final List indexRequests = IntStream.range(0, between(10, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "type").setSource("foo", "bar")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("foo", "bar")) .collect(Collectors.toList()); indexRandom(randomBoolean(), true, true, indexRequests); ensureGreen(); @@ -2179,7 +2163,7 @@ public void testPeerRecoveryTrimsLocalTranslog() throws Exception { indexers[i] = new Thread(() -> { while (stopped.get() == false) { try { - IndexResponse response = client().prepareIndex(indexName, "_doc") + IndexResponse response = client().prepareIndex(indexName) .setSource(Collections.singletonMap("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON) .get(); assertThat(response.getResult(), isOneOf(CREATED, UPDATED)); @@ -2233,7 +2217,7 @@ public void testReservesBytesDuringPeerRecoveryPhaseOne() throws Exception { ); ensureGreen(indexName); final List indexRequests = IntStream.range(0, between(10, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("foo", "bar")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("foo", "bar")) .collect(Collectors.toList()); indexRandom(randomBoolean(), true, true, indexRequests); assertThat(client().admin().indices().prepareFlush(indexName).get().getFailedShards(), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateNumberOfReplicasIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateNumberOfReplicasIT.java index d2d025949abb6..f78ecd82834c2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateNumberOfReplicasIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateNumberOfReplicasIT.java @@ -80,7 +80,8 @@ public void testSimpleUpdateNumberOfReplicas() throws Exception { assertThat(clusterHealth.getIndices().get("test").getActiveShards(), equalTo(numShards.totalNumShards)); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("value", "test" + i).endObject()) .get(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateSettingsIT.java 
b/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateSettingsIT.java index 13e1e4a3fea52..6dab7781e08db 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateSettingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateSettingsIT.java @@ -506,16 +506,16 @@ public void testOpenCloseUpdateSettings() throws Exception { public void testEngineGCDeletesSetting() throws Exception { createIndex("test"); - client().prepareIndex("test", "type", "1").setSource("f", 1).setVersionType(VersionType.EXTERNAL).setVersion(1).get(); - client().prepareDelete("test", "type", "1").setVersionType(VersionType.EXTERNAL).setVersion(2).get(); + client().prepareIndex("test").setId("1").setSource("f", 1).setVersionType(VersionType.EXTERNAL).setVersion(1).get(); + client().prepareDelete("test", "1").setVersionType(VersionType.EXTERNAL).setVersion(2).get(); // delete is still in cache this should fail assertRequestBuilderThrows( - client().prepareIndex("test", "type", "1").setSource("f", 3).setVersionType(VersionType.EXTERNAL).setVersion(1), + client().prepareIndex("test").setId("1").setSource("f", 3).setVersionType(VersionType.EXTERNAL).setVersion(1), VersionConflictEngineException.class ); assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.gc_deletes", 0))); - client().prepareDelete("test", "type", "1").setVersionType(VersionType.EXTERNAL).setVersion(4).get(); + client().prepareDelete("test", "1").setVersionType(VersionType.EXTERNAL).setVersion(4).get(); // Make sure the time has advanced for InternalEngine#resolveDocVersion() for (ThreadPool threadPool : internalCluster().getInstances(ThreadPool.class)) { @@ -524,7 +524,7 @@ public void testEngineGCDeletesSetting() throws Exception { } // delete should not be in cache - client().prepareIndex("test", "type", "1").setSource("f", 2).setVersionType(VersionType.EXTERNAL).setVersion(1); + client().prepareIndex("test").setId("1").setSource("f", 2).setVersionType(VersionType.EXTERNAL).setVersion(1); } public void testUpdateSettingsWithBlocks() { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java index aebb891ae784b..41749a9bfd0f4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java @@ -143,7 +143,7 @@ public void testCloseIndex() throws Exception { false, randomBoolean(), IntStream.range(0, nbDocs) - .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); @@ -164,7 +164,7 @@ public void testCloseAlreadyClosedIndex() throws Exception { false, randomBoolean(), IntStream.range(0, randomIntBetween(1, 10)) - .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); } @@ -207,7 +207,7 @@ public void testConcurrentClose() throws InterruptedException { false, randomBoolean(), IntStream.range(0, nbDocs) - .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> 
client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); ensureYellowAndNoInitializingShards(indexName); @@ -268,7 +268,7 @@ public void testCloseWhileDeletingIndices() throws Exception { false, randomBoolean(), IntStream.range(0, 10) - .mapToObj(n -> client().prepareIndex(indexName, "_doc", String.valueOf(n)).setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setId(String.valueOf(n)).setSource("num", n)) .collect(toList()) ); } @@ -395,7 +395,7 @@ public void testCloseIndexWaitForActiveShards() throws Exception { false, randomBoolean(), IntStream.range(0, nbDocs) - .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); ensureGreen(indexName); @@ -433,7 +433,7 @@ public void testNoopPeerRecoveriesWhenIndexClosed() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, randomIntBetween(0, 50)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); ensureGreen(indexName); @@ -480,7 +480,7 @@ public void testRecoverExistingReplica() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, randomIntBetween(0, 50)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); ensureGreen(indexName); @@ -492,7 +492,7 @@ public Settings onNodeStopped(String nodeName) throws Exception { Client client = client(dataNodes.get(0)); int moreDocs = randomIntBetween(1, 50); for (int i = 0; i < moreDocs; i++) { - client.prepareIndex(indexName, "_doc").setSource("num", i).get(); + client.prepareIndex(indexName).setSource("num", i).get(); } assertAcked(client.admin().indices().prepareClose(indexName)); return super.onNodeStopped(nodeName); @@ -529,7 +529,7 @@ public void testRelocatedClosedIndexIssue() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, randomIntBetween(0, 50)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); assertAcked(client().admin().indices().prepareClose(indexName).setWaitForActiveShards(ActiveShardCount.ALL)); @@ -557,7 +557,7 @@ public void testResyncPropagatePrimaryTerm() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, randomIntBetween(0, 50)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); ensureGreen(indexName); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseWhileRelocatingShardsIT.java index 88be4d71aeb63..caf741e9b8882 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -120,7 +120,7 @@ public void testCloseWhileRelocatingShards() throws Exception { indexRandom( randomBoolean(), IntStream.range(0, nbDocs) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> 
client().prepareIndex(indexName).setSource("num", n)) .collect(Collectors.toList()) ); break; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java index 95a421b126bae..b8baa35507892 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java @@ -320,7 +320,7 @@ public void testOpenCloseWithDocs() throws IOException, ExecutionException, Inte int docs = between(10, 100); IndexRequestBuilder[] builder = new IndexRequestBuilder[docs]; for (int i = 0; i < docs; i++) { - builder[i] = client().prepareIndex("test", "type", "" + i).setSource("test", "init"); + builder[i] = client().prepareIndex("test").setId("" + i).setSource("test", "init"); } indexRandom(true, builder); if (randomBoolean()) { @@ -342,7 +342,7 @@ public void testOpenCloseIndexWithBlocks() { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex("test", "type", "" + i).setSource("test", "init").execute().actionGet(); + client().prepareIndex("test").setId("" + i).setSource("test", "init").execute().actionGet(); } for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE)) { @@ -398,7 +398,7 @@ public void testTranslogStats() throws Exception { final int nbDocs = randomIntBetween(0, 50); int uncommittedOps = 0; for (long i = 0; i < nbDocs; i++) { - final IndexResponse indexResponse = client().prepareIndex(indexName, "_doc", Long.toString(i)).setSource("field", i).get(); + final IndexResponse indexResponse = client().prepareIndex(indexName).setId(Long.toString(i)).setSource("field", i).get(); assertThat(indexResponse.status(), is(RestStatus.CREATED)); if (rarely()) { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/SimpleIndexStateIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/SimpleIndexStateIT.java index 7f092bae7a79d..b75e36efe1f2f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/state/SimpleIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/SimpleIndexStateIT.java @@ -75,7 +75,7 @@ public void testSimpleOpenClose() { ); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); logger.info("--> closing test index..."); assertAcked(client().admin().indices().prepareClose("test")); @@ -86,7 +86,7 @@ public void testSimpleOpenClose() { logger.info("--> trying to index into a closed index ..."); try { - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); fail(); } catch (IndexClosedException e) { // all is well @@ -109,7 +109,7 @@ public void testSimpleOpenClose() { ); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); } public void testFastCloseAfterCreateContinuesCreateAfterOpen() { @@ -150,7 +150,7 @@ public void testFastCloseAfterCreateContinuesCreateAfterOpen() { ); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", 
"value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); } public void testConsistencyAfterIndexCreationFailure() { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java index cca01a9ec6dcb..07c8471e360f6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java @@ -152,8 +152,8 @@ public void testFieldDataStats() { .get() ); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", "value1", "field2", "value1").execute().actionGet(); - client().prepareIndex("test", "type", "2").setSource("field", "value2", "field2", "value2").execute().actionGet(); + client().prepareIndex("test").setId("1").setSource("field", "value1", "field2", "value1").execute().actionGet(); + client().prepareIndex("test").setId("2").setSource("field", "value2", "field2", "value2").execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats("data:true").setIndices(true).execute().actionGet(); @@ -275,8 +275,8 @@ public void testClearAllCaches() throws Exception { ); ensureGreen(); client().admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); - client().prepareIndex("test", "type", "1").setSource("field", "value1").execute().actionGet(); - client().prepareIndex("test", "type", "2").setSource("field", "value2").execute().actionGet(); + client().prepareIndex("test").setId("1").setSource("field", "value1").execute().actionGet(); + client().prepareIndex("test").setId("2").setSource("field", "value2").execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats("data:true").setIndices(true).execute().actionGet(); @@ -385,7 +385,8 @@ public void testQueryCache() throws Exception { while (true) { IndexRequestBuilder[] builders = new IndexRequestBuilder[pageDocs]; for (int i = 0; i < pageDocs; ++i) { - builders[i] = client().prepareIndex("idx", "type", Integer.toString(counter++)) + builders[i] = client().prepareIndex("idx") + .setId(Integer.toString(counter++)) .setSource(jsonBuilder().startObject().field("common", "field").field("str_value", "s" + i).endObject()); } indexRandom(true, builders); @@ -445,7 +446,8 @@ public void testQueryCache() throws Exception { // index the data again... 
IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; ++i) { - builders[i] = client().prepareIndex("idx", "type", Integer.toString(i)) + builders[i] = client().prepareIndex("idx") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("common", "field").field("str_value", "s" + i).endObject()); } indexRandom(true, builders); @@ -577,7 +579,7 @@ public void testNonThrottleStats() throws Exception { sb.append(termUpto++); sb.append(" some random text that keeps repeating over and over again hambone"); } - client().prepareIndex("test", "type", "" + termUpto).setSource("field" + (i % 10), sb.toString()).get(); + client().prepareIndex("test").setId("" + termUpto).setSource("field" + (i % 10), sb.toString()).get(); } refresh(); stats = client().admin().indices().prepareStats().execute().actionGet(); @@ -613,7 +615,7 @@ public void testThrottleStats() throws Exception { sb.append(' '); sb.append(termUpto++); } - client().prepareIndex("test", "type", "" + termUpto).setSource("field" + (i % 10), sb.toString()).get(); + client().prepareIndex("test").setId("" + termUpto).setSource("field" + (i % 10), sb.toString()).get(); if (i % 2 == 0) { refresh(); } @@ -639,9 +641,9 @@ public void testSimpleStats() throws Exception { createIndex("test1", "test2"); ensureGreen(); - client().prepareIndex("test1", "type", Integer.toString(1)).setSource("field", "value").execute().actionGet(); - client().prepareIndex("test1", "type", Integer.toString(2)).setSource("field", "value").execute().actionGet(); - client().prepareIndex("test2", "type", Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test1").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test1").setId(Integer.toString(2)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test2").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); refresh(); NumShards test1 = getNumShards("test1"); @@ -733,7 +735,8 @@ public void testSimpleStats() throws Exception { // index failed try { - client().prepareIndex("test1", "type", Integer.toString(1)) + client().prepareIndex("test1") + .setId(Integer.toString(1)) .setSource("field", "value") .setVersion(1) .setVersionType(VersionType.EXTERNAL) @@ -742,7 +745,8 @@ public void testSimpleStats() throws Exception { fail("Expected a version conflict"); } catch (VersionConflictEngineException e) {} try { - client().prepareIndex("test2", "type", Integer.toString(1)) + client().prepareIndex("test2") + .setId(Integer.toString(1)) .setSource("field", "value") .setVersion(1) .setVersionType(VersionType.EXTERNAL) @@ -751,7 +755,8 @@ public void testSimpleStats() throws Exception { fail("Expected a version conflict"); } catch (VersionConflictEngineException e) {} try { - client().prepareIndex("test2", "type", Integer.toString(1)) + client().prepareIndex("test2") + .setId(Integer.toString(1)) .setSource("field", "value") .setVersion(1) .setVersionType(VersionType.EXTERNAL) @@ -791,7 +796,7 @@ public void testMergeStats() { assertThat(stats.getTotal().getSearch(), nullValue()); for (int i = 0; i < 20; i++) { - client().prepareIndex("test_index", "_doc", Integer.toString(i)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test_index").setId(Integer.toString(i)).setSource("field", "value").execute().actionGet(); client().admin().indices().prepareFlush().execute().actionGet(); } 
client().admin().indices().prepareForceMerge().setMaxNumSegments(1).execute().actionGet(); @@ -837,9 +842,9 @@ public void testAllFlags() throws Exception { ensureGreen(); - client().prepareIndex("test_index", "_doc", Integer.toString(1)).setSource("field", "value").execute().actionGet(); - client().prepareIndex("test_index", "_doc", Integer.toString(2)).setSource("field", "value").execute().actionGet(); - client().prepareIndex("test_index_2", "type", Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test_index").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test_index").setId(Integer.toString(2)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test_index_2").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats(); @@ -964,9 +969,9 @@ public void testMultiIndex() throws Exception { ensureGreen(); - client().prepareIndex("test1", "_doc", Integer.toString(1)).setSource("field", "value").execute().actionGet(); - client().prepareIndex("test1", "_doc", Integer.toString(2)).setSource("field", "value").execute().actionGet(); - client().prepareIndex("test2", "_doc", Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test1").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test1").setId(Integer.toString(2)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test2").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); refresh(); int numShards1 = getNumShards("test1").totalNumShards; @@ -1008,7 +1013,7 @@ public void testCompletionFieldsParam() throws Exception { ); ensureGreen(); - client().prepareIndex("test1", "_doc", Integer.toString(1)).setSource("{\"bar\":\"bar\",\"baz\":\"baz\"}", XContentType.JSON).get(); + client().prepareIndex("test1").setId(Integer.toString(1)).setSource("{\"bar\":\"bar\",\"baz\":\"baz\"}", XContentType.JSON).get(); refresh(); IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats(); @@ -1050,7 +1055,7 @@ public void testGroupsParam() throws Exception { ensureGreen(); - client().prepareIndex("test1", "bar", Integer.toString(1)).setSource("foo", "bar").execute().actionGet(); + client().prepareIndex("test1").setId(Integer.toString(1)).setSource("foo", "bar").execute().actionGet(); refresh(); client().prepareSearch("_all").setStats("bar", "baz").execute().actionGet(); @@ -1210,8 +1215,8 @@ public void testFilterCacheStats() throws Exception { indexRandom( false, true, - client().prepareIndex("index", "type", "1").setSource("foo", "bar"), - client().prepareIndex("index", "type", "2").setSource("foo", "baz") + client().prepareIndex("index").setId("1").setSource("foo", "bar"), + client().prepareIndex("index").setId("2").setSource("foo", "baz") ); persistGlobalCheckpoint("index"); // Need to persist the global checkpoint for the soft-deletes retention MP. 
refresh(); @@ -1245,8 +1250,8 @@ public void testFilterCacheStats() throws Exception { assertThat(stats.getTotal().queryCache.getCacheSize(), greaterThan(0L)); }); - assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "type", "1").get().getResult()); - assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "type", "2").get().getResult()); + assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "1").get().getResult()); + assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "2").get().getResult()); // Here we are testing that a fully deleted segment should be dropped and its cached is evicted. // In order to instruct the merge policy not to keep a fully deleted segment, // we need to flush and make that commit safe so that the SoftDeletesPolicy can drop everything. @@ -1285,8 +1290,8 @@ public void testFilterCacheStats() throws Exception { indexRandom( true, - client().prepareIndex("index", "type", "1").setSource("foo", "bar"), - client().prepareIndex("index", "type", "2").setSource("foo", "baz") + client().prepareIndex("index").setId("1").setSource("foo", "bar"), + client().prepareIndex("index").setId("2").setSource("foo", "baz") ); assertBusy(() -> { @@ -1353,7 +1358,7 @@ public void testConcurrentIndexingAndStatsRequests() throws BrokenBarrierExcepti } while (!stop.get()) { final String id = Integer.toString(idGenerator.incrementAndGet()); - final IndexResponse response = client().prepareIndex("test", "type", id).setSource("{}", XContentType.JSON).get(); + final IndexResponse response = client().prepareIndex("test").setId(id).setSource("{}", XContentType.JSON).get(); assertThat(response.getResult(), equalTo(DocWriteResponse.Result.CREATED)); } }); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java index c4a4227c0bc9c..378657a6554b4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java @@ -182,10 +182,7 @@ public void testSimpleIndexTemplateTests() throws Exception { assertThat(response.getIndexTemplates(), hasSize(2)); // index something into test_index, will match on both templates - client().prepareIndex("test_index", "type1", "1") - .setSource("field1", "value1", "field2", "value 2") - .setRefreshPolicy(IMMEDIATE) - .get(); + client().prepareIndex("test_index").setId("1").setSource("field1", "value1", "field2", "value 2").setRefreshPolicy(IMMEDIATE).get(); ensureGreen(); SearchResponse searchResponse = client().prepareSearch("test_index") @@ -200,10 +197,7 @@ public void testSimpleIndexTemplateTests() throws Exception { // field2 is not stored. 
assertThat(searchResponse.getHits().getAt(0).field("field2"), nullValue()); - client().prepareIndex("text_index", "type1", "1") - .setSource("field1", "value1", "field2", "value 2") - .setRefreshPolicy(IMMEDIATE) - .get(); + client().prepareIndex("text_index").setId("1").setSource("field1", "value1", "field2", "value 2").setRefreshPolicy(IMMEDIATE).get(); ensureGreen(); // now only match on one template (template_1) @@ -570,11 +564,11 @@ public void testIndexTemplateWithAliases() throws Exception { assertAcked(prepareCreate("test_index")); ensureGreen(); - client().prepareIndex("test_index", "_doc", "1").setSource("type", "type1", "field", "A value").get(); - client().prepareIndex("test_index", "_doc", "2").setSource("type", "type2", "field", "B value").get(); - client().prepareIndex("test_index", "_doc", "3").setSource("type", "typeX", "field", "C value").get(); - client().prepareIndex("test_index", "_doc", "4").setSource("type", "typeY", "field", "D value").get(); - client().prepareIndex("test_index", "_doc", "5").setSource("type", "typeZ", "field", "E value").get(); + client().prepareIndex("test_index").setId("1").setSource("type", "type1", "field", "A value").get(); + client().prepareIndex("test_index").setId("2").setSource("type", "type2", "field", "B value").get(); + client().prepareIndex("test_index").setId("3").setSource("type", "typeX", "field", "C value").get(); + client().prepareIndex("test_index").setId("4").setSource("type", "typeY", "field", "D value").get(); + client().prepareIndex("test_index").setId("5").setSource("type", "typeZ", "field", "E value").get(); GetAliasesResponse getAliasesResponse = client().admin().indices().prepareGetAliases().setIndices("test_index").get(); assertThat(getAliasesResponse.getAliases().size(), equalTo(1)); @@ -637,8 +631,8 @@ public void testIndexTemplateWithAliasesInSource() { assertThat(getAliasesResponse.getAliases().size(), equalTo(1)); assertThat(getAliasesResponse.getAliases().get("test_index").size(), equalTo(1)); - client().prepareIndex("test_index", "_doc", "1").setSource("field", "value1").get(); - client().prepareIndex("test_index", "_doc", "2").setSource("field", "value2").get(); + client().prepareIndex("test_index").setId("1").setSource("field", "value1").get(); + client().prepareIndex("test_index").setId("2").setSource("field", "value2").get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test_index").get(); @@ -676,8 +670,8 @@ public void testIndexTemplateWithAliasesSource() { assertThat(getAliasesResponse.getAliases().size(), equalTo(1)); assertThat(getAliasesResponse.getAliases().get("test_index").size(), equalTo(3)); - client().prepareIndex("test_index", "_doc", "1").setSource("field", "value1").get(); - client().prepareIndex("test_index", "_doc", "2").setSource("field", "value2").get(); + client().prepareIndex("test_index").setId("1").setSource("field", "value1").get(); + client().prepareIndex("test_index").setId("2").setSource("field", "value2").get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test_index").get(); @@ -838,7 +832,7 @@ public void testStrictAliasParsingInIndicesCreatedViaTemplates() throws Exceptio .addAlias(new Alias("alias4").filter(termQuery("field", "value"))) .get(); - client().prepareIndex("a1", "test", "test").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("a1").setId("test").setSource("{}", XContentType.JSON).get(); BulkResponse response = client().prepareBulk().add(new IndexRequest("a2").id("test").source("{}", 
XContentType.JSON)).get(); assertThat(response.hasFailures(), is(false)); assertThat(response.getItems()[0].isFailed(), equalTo(false)); @@ -854,7 +848,7 @@ public void testStrictAliasParsingInIndicesCreatedViaTemplates() throws Exceptio // So the aliases defined in the index template for this index will not fail // even though the fields in the alias fields don't exist yet and indexing into // an index that doesn't exist yet will succeed - client().prepareIndex("b1", "test", "test").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("b1").setId("test").setSource("{}", XContentType.JSON).get(); response = client().prepareBulk().add(new IndexRequest("b2").id("test").source("{}", XContentType.JSON)).get(); assertThat(response.hasFailures(), is(false)); @@ -972,9 +966,9 @@ public void testMultipleTemplate() throws IOException { ) .get(); - client().prepareIndex("ax", "type1", "1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("ax").setId("1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); - client().prepareIndex("bx", "type1", "1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("bx").setId("1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java b/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java index 6317dd62418f3..2f666bbd65d4d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java @@ -256,14 +256,14 @@ public void test() throws Exception { assertThat(getResponse.pipelines().size(), equalTo(1)); assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); - client().prepareIndex("test", "type", "1").setPipeline("_id").setSource("field", "value", "fail", false).get(); + client().prepareIndex("test").setId("1").setPipeline("_id").setSource("field", "value", "fail", false).get(); Map doc = client().prepareGet("test", "1").get().getSourceAsMap(); assertThat(doc.get("field"), equalTo("value")); assertThat(doc.get("processed"), equalTo(true)); client().prepareBulk() - .add(client().prepareIndex("test", "type", "2").setSource("field", "value2", "fail", false).setPipeline("_id")) + .add(client().prepareIndex("test").setId("2").setSource("field", "value2", "fail", false).setPipeline("_id")) .get(); doc = client().prepareGet("test", "2").get().getSourceAsMap(); assertThat(doc.get("field"), equalTo("value2")); @@ -319,7 +319,7 @@ public void testWithDedicatedMaster() throws Exception { client().admin().cluster().putPipeline(putPipelineRequest).get(); BulkItemResponse item = client(masterOnlyNode).prepareBulk() - .add(client().prepareIndex("test", "type").setSource("field", "value2", "drop", true).setPipeline("_id")) + .add(client().prepareIndex("test").setSource("field", "value2", "drop", true).setPipeline("_id")) .get() .getItems()[0]; assertFalse(item.isFailed()); @@ -451,7 +451,7 @@ public void testPipelineProcessorOnFailure() throws Exception { client().admin().cluster().putPipeline(putPipelineRequest).get(); } - client().prepareIndex("test", "_doc").setId("1").setSource("{}", XContentType.JSON).setPipeline("1").get(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).setPipeline("1").get(); Map inserted = 
client().prepareGet("test", "1").get().getSourceAsMap(); assertThat(inserted.get("readme"), equalTo("pipeline with id [3] is a bad pipeline")); } diff --git a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java index 3967f93f3a9b8..5b78c5686dc6a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java @@ -62,7 +62,8 @@ public class SimpleMgetIT extends OpenSearchIntegTestCase { public void testThatMgetShouldWorkWithOneIndexMissing() throws IOException { createIndex("test"); - client().prepareIndex("test", "test", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("foo", "bar").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -99,7 +100,8 @@ public void testThatMgetShouldWorkWithMultiIndexAlias() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("multiIndexAlias"))); assertAcked(prepareCreate("test2").addAlias(new Alias("multiIndexAlias"))); - client().prepareIndex("test", "test", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("foo", "bar").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -139,7 +141,8 @@ public void testThatMgetShouldWorkWithAliasRouting() throws IOException { ) ); - client().prepareIndex("alias1", "test", "1") + client().prepareIndex("alias1") + .setId("1") .setSource(jsonBuilder().startObject().field("foo", "bar").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -165,7 +168,7 @@ public void testThatSourceFilteringIsSupported() throws Exception { .endObject() ); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource(sourceBytesRef, XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource(sourceBytesRef, XContentType.JSON).get(); } MultiGetRequestBuilder request = client().prepareMultiGet(); @@ -212,7 +215,8 @@ public void testThatRoutingPerDocumentIsSupported() throws Exception { final String id = routingKeyForShard("test", 0); final String routingOtherShard = routingKeyForShard("test", 1); - client().prepareIndex("test", "test", id) + client().prepareIndex("test") + .setId(id) .setRefreshPolicy(IMMEDIATE) .setRouting(routingOtherShard) .setSource(jsonBuilder().startObject().field("foo", "bar").endObject()) diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/FullRollingRestartIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/FullRollingRestartIT.java index 4808371633cd0..15d1f3a0559a8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/FullRollingRestartIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/FullRollingRestartIT.java @@ -73,14 +73,16 @@ public void testFullRollingRestart() throws Exception { final String healthTimeout = "1m"; for (int i = 0; i < 1000; i++) { - client().prepareIndex("test", "type1", Long.toString(i)) + client().prepareIndex("test") + .setId(Long.toString(i)) .setSource(MapBuilder.newMapBuilder().put("test", "value" + i).map()) .execute() .actionGet(); } flush(); for (int i = 1000; i < 2000; i++) { - client().prepareIndex("test", "type1", Long.toString(i)) + client().prepareIndex("test") + .setId(Long.toString(i)) .setSource(MapBuilder.newMapBuilder().put("test", "value" + i).map()) .execute() .actionGet(); @@ -210,7 +212,8 @@ public void 
testNoRebalanceOnRollingRestart() throws Exception { ).get(); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Long.toString(i)) + client().prepareIndex("test") + .setId(Long.toString(i)) .setSource(MapBuilder.newMapBuilder().put("test", "value" + i).map()) .execute() .actionGet(); diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java index 9b26ee101909b..c5b0d99e6d275 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java @@ -150,13 +150,13 @@ public void testSimpleRelocationNoIndexing() { logger.info("--> index 10 docs"); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); } logger.info("--> flush so we have an actual index"); client().admin().indices().prepareFlush().execute().actionGet(); logger.info("--> index more docs so we have something in the translog"); for (int i = 10; i < 20; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); } logger.info("--> verifying count"); @@ -376,12 +376,12 @@ public void indexShardStateChanged( List<IndexRequestBuilder> builders1 = new ArrayList<>(); for (int numDocs = randomIntBetween(10, 30); numDocs > 0; numDocs--) { - builders1.add(client().prepareIndex("test", "type").setSource("{}", XContentType.JSON)); + builders1.add(client().prepareIndex("test").setSource("{}", XContentType.JSON)); } List<IndexRequestBuilder> builders2 = new ArrayList<>(); for (int numDocs = randomIntBetween(10, 30); numDocs > 0; numDocs--) { - builders2.add(client().prepareIndex("test", "type").setSource("{}", XContentType.JSON)); + builders2.add(client().prepareIndex("test").setSource("{}", XContentType.JSON)); } logger.info("--> START relocate the shard from {} to {}", nodes[fromNode], nodes[toNode]); @@ -441,7 +441,7 @@ public void testCancellationCleansTempFiles() throws Exception { List<IndexRequestBuilder> requests = new ArrayList<>(); int numDocs = scaledRandomIntBetween(25, 250); for (int i = 0; i < numDocs; i++) { - requests.add(client().prepareIndex(indexName, "type").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("3").setWaitForGreenStatus().get().isTimedOut()); @@ -560,7 +560,7 @@ public void testIndexSearchAndRelocateConcurrently() throws Exception { for (int i = 0; i < numDocs; i++) { String id = randomRealisticUnicodeOfLength(10) + String.valueOf(i); ids.add(id); - docs[i] = client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(i)); + docs[i] = client().prepareIndex("test").setId(id).setSource("field1", English.intToEnglish(i)); } indexRandom(true, docs); SearchResponse countResponse = client().prepareSearch("test").get(); @@ -578,7 +578,7 @@ public void testIndexSearchAndRelocateConcurrently() throws Exception { for (int i = 0; i < numDocs; i++) { String id = randomRealisticUnicodeOfLength(10) + String.valueOf(numDocs + i); ids.add(id); - docs[i] =
client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(numDocs + i)); + docs[i] = client().prepareIndex("test").setId(id).setSource("field1", English.intToEnglish(numDocs + i)); } indexRandom(true, docs); @@ -614,13 +614,14 @@ public void testRelocateWhileWaitingForRefresh() { logger.info("--> index 10 docs"); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); } logger.info("--> flush so we have an actual index"); client().admin().indices().prepareFlush().execute().actionGet(); logger.info("--> index more docs so we have something in the translog"); for (int i = 10; i < 20; i++) { - client().prepareIndex("test", "type", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) .setSource("field", "value" + i) .execute(); @@ -671,7 +672,7 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws E logger.info("--> index 10 docs"); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); } logger.info("--> flush so we have an actual index"); client().admin().indices().prepareFlush().execute().actionGet(); @@ -679,7 +680,8 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws E final List<ActionFuture<IndexResponse>> pendingIndexResponses = new ArrayList<>(); for (int i = 10; i < 20; i++) { pendingIndexResponses.add( - client().prepareIndex("test", "type", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) .setSource("field", "value" + i) .execute() @@ -706,7 +708,8 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws E logger.info("--> index 100 docs while relocating"); for (int i = 20; i < 120; i++) { pendingIndexResponses.add( - client().prepareIndex("test", "type", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) .setSource("field", "value" + i) .execute() diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java index 43f21d9397580..45fb1a8fc58c0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java @@ -122,7 +122,7 @@ public void testCancelRecoveryAndResume() throws Exception { List<IndexRequestBuilder> builder = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { String id = Integer.toString(i); - builder.add(client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(i), "the_id", id)); + builder.add(client().prepareIndex("test").setId(id).setSource("field1", English.intToEnglish(i), "the_id", id)); } indexRandom(true, builder); for (int i = 0; i < numDocs; i++) { diff --git a/server/src/internalClusterTest/java/org/opensearch/routing/AliasResolveRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/routing/AliasResolveRoutingIT.java index
1c2bdf2e3a09b..eb929fd28d2ef 100644 --- a/server/src/internalClusterTest/java/org/opensearch/routing/AliasResolveRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/routing/AliasResolveRoutingIT.java @@ -61,9 +61,9 @@ public void testSearchClosedWildcardIndex() throws ExecutionException, Interrupt client().admin().indices().prepareClose("test-1").get(); indexRandom( true, - client().prepareIndex("test-0", "type1", "1").setSource("field1", "the quick brown fox jumps"), - client().prepareIndex("test-0", "type1", "2").setSource("field1", "quick brown"), - client().prepareIndex("test-0", "type1", "3").setSource("field1", "quick") + client().prepareIndex("test-0").setId("1").setSource("field1", "the quick brown fox jumps"), + client().prepareIndex("test-0").setId("2").setSource("field1", "quick brown"), + client().prepareIndex("test-0").setId("3").setSource("field1", "quick") ); refresh("test-*"); assertHitCount( diff --git a/server/src/internalClusterTest/java/org/opensearch/routing/AliasRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/routing/AliasRoutingIT.java index a1dd32aa300c9..274133c2c8239 100644 --- a/server/src/internalClusterTest/java/org/opensearch/routing/AliasRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/routing/AliasRoutingIT.java @@ -63,7 +63,7 @@ public void testAliasCrudRouting() throws Exception { assertAcked(admin().indices().prepareAliases().addAliasAction(AliasActions.add().index("test").alias("alias0").routing("0"))); logger.info("--> indexing with id [1], and routing [0] using alias"); - client().prepareIndex("alias0", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias0").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); @@ -79,7 +79,7 @@ public void testAliasCrudRouting() throws Exception { } logger.info("--> updating with id [1] and routing through alias"); - client().prepareUpdate("alias0", "type1", "1") + client().prepareUpdate("alias0", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") .execute() @@ -93,7 +93,7 @@ public void testAliasCrudRouting() throws Exception { } logger.info("--> deleting with no routing, should not delete anything"); - client().prepareDelete("test", "type1", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareDelete("test", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); assertThat(client().prepareGet("test", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); @@ -101,7 +101,7 @@ public void testAliasCrudRouting() throws Exception { } logger.info("--> deleting with routing alias, should delete"); - client().prepareDelete("alias0", "type1", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareDelete("alias0", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); assertThat(client().prepareGet("test", "1").setRouting("0").execute().actionGet().isExists(), 
equalTo(false)); @@ -109,7 +109,7 @@ public void testAliasCrudRouting() throws Exception { } logger.info("--> indexing with id [1], and routing [0] using alias"); - client().prepareIndex("alias0", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias0").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); @@ -134,7 +134,7 @@ public void testAliasSearchRouting() throws Exception { ); logger.info("--> indexing with id [1], and routing [0] using alias"); - client().prepareIndex("alias0", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias0").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); @@ -245,7 +245,7 @@ public void testAliasSearchRouting() throws Exception { } logger.info("--> indexing with id [2], and routing [1] using alias"); - client().prepareIndex("alias1", "type1", "2").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias1").setId("2").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> search with no routing, should fine two"); for (int i = 0; i < 5; i++) { @@ -491,7 +491,7 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { ); ensureGreen(); // wait for events again to make sure we got the aliases on all nodes logger.info("--> indexing with id [1], and routing [0] using alias to test-a"); - client().prepareIndex("alias-a0", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias-a0").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test-a", "1").execute().actionGet().isExists(), equalTo(false)); @@ -502,7 +502,7 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { } logger.info("--> indexing with id [0], and routing [1] using alias to test-b"); - client().prepareIndex("alias-b1", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias-b1").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test-a", "1").execute().actionGet().isExists(), equalTo(false)); @@ -594,9 +594,9 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue2682() thro assertAcked(admin().indices().prepareAliases().addAliasAction(AliasActions.add().index("index").alias("index_1").routing("1"))); logger.info("--> indexing on index_1 which is an alias for index with routing [1]"); - client().prepareIndex("index_1", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index_1").setId("1").setSource("field", 
"value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> indexing on index_2 which is a concrete index"); - client().prepareIndex("index_2", "type2", "2").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index_2").setId("2").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> search all on index_* should find two"); for (int i = 0; i < 5; i++) { @@ -625,9 +625,9 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue3268() thro assertAcked(admin().indices().prepareAliases().addAliasAction(AliasActions.add().index("index").alias("index_1").routing("1"))); logger.info("--> indexing on index_1 which is an alias for index with routing [1]"); - client().prepareIndex("index_1", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index_1").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> indexing on index_2 which is a concrete index"); - client().prepareIndex("index_2", "type2", "2").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index_2").setId("2").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("index_*") .setSearchType(SearchType.QUERY_THEN_FETCH) @@ -650,7 +650,7 @@ public void testIndexingAliasesOverTime() throws Exception { assertAcked(admin().indices().prepareAliases().addAliasAction(AliasActions.add().index("test").alias("alias").routing("3"))); logger.info("--> indexing with id [0], and routing [3]"); - client().prepareIndex("alias", "type1", "0").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias").setId("0").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); logger.info("--> verifying get and search with routing, should find"); @@ -712,7 +712,7 @@ public void testIndexingAliasesOverTime() throws Exception { ); logger.info("--> indexing with id [1], and routing [4]"); - client().prepareIndex("alias", "type1", "1").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias").setId("1").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); logger.info("--> verifying get and search with routing, should find"); diff --git a/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java index 905445b2d7aeb..99742166cda7f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java @@ -37,6 +37,7 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilders; import org.opensearch.test.OpenSearchIntegTestCase; import org.mockito.internal.util.collections.Sets; @@ -62,7 +63,11 @@ public void testVariousPartitionSizes() throws Exception { .put("index.number_of_routing_shards", 
shards) .put("index.routing_partition_size", partitionSize) ) - .addMapping("type", "{\"type\":{\"_routing\":{\"required\":true}}}", XContentType.JSON) + .addMapping( + MapperService.SINGLE_MAPPING_NAME, + "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"_routing\":{\"required\":true}}}", + XContentType.JSON + ) .execute() .actionGet(); ensureGreen(); @@ -96,7 +101,11 @@ public void testShrinking() throws Exception { .put("index.number_of_replicas", numberOfReplicas()) .put("index.routing_partition_size", partitionSize) ) - .addMapping("type", "{\"type\":{\"_routing\":{\"required\":true}}}", XContentType.JSON) + .addMapping( + MapperService.SINGLE_MAPPING_NAME, + "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"_routing\":{\"required\":true}}}", + XContentType.JSON + ) .execute() .actionGet(); ensureGreen(); @@ -249,7 +258,7 @@ private Map> generateRoutedDocumentIds(String index) { String id = routingValue + "_" + String.valueOf(k); routingToDocumentIds.get(routingValue).add(id); - client().prepareIndex(index, "type", id).setRouting(routingValue).setSource("foo", "bar").get(); + client().prepareIndex(index).setId(id).setRouting(routingValue).setSource("foo", "bar").get(); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java index 6e9498d177aaf..f3179a0c4acb3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java @@ -93,7 +93,8 @@ public void testSimpleCrudRouting() throws Exception { ensureGreen(); String routingValue = findNonMatchingRoutingValue("test", "1"); logger.info("--> indexing with id [1], and routing [{}]", routingValue); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setRouting(routingValue) .setSource("field", "value1") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) @@ -108,21 +109,22 @@ public void testSimpleCrudRouting() throws Exception { } logger.info("--> deleting with no routing, should not delete anything"); - client().prepareDelete("test", "type1", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareDelete("test", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); assertThat(client().prepareGet("test", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); } logger.info("--> deleting with routing, should delete"); - client().prepareDelete("test", "type1", "1").setRouting(routingValue).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareDelete("test", "1").setRouting(routingValue).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); assertThat(client().prepareGet("test", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(false)); } logger.info("--> indexing with id [1], and routing [0]"); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setRouting(routingValue) .setSource("field", "value1") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) @@ -143,7 +145,8 @@ public void testSimpleSearchRouting() { String routingValue = findNonMatchingRoutingValue("test", "1"); logger.info("--> indexing with id [1], and routing [{}]", 
routingValue); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setRouting(routingValue) .setSource("field", "value1") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) @@ -217,7 +220,8 @@ public void testSimpleSearchRouting() { String secondRoutingValue = "1"; logger.info("--> indexing with id [{}], and routing [{}]", routingValue, secondRoutingValue); - client().prepareIndex("test", "type1", routingValue) + client().prepareIndex("test") + .setId(routingValue) .setRouting(secondRoutingValue) .setSource("field", "value1") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) @@ -364,7 +368,8 @@ public void testRequiredRoutingCrudApis() throws Exception { String routingValue = findNonMatchingRoutingValue("test", "1"); logger.info("--> indexing with id [1], and routing [{}]", routingValue); - client().prepareIndex(indexOrAlias(), "type1", "1") + client().prepareIndex(indexOrAlias()) + .setId("1") .setRouting(routingValue) .setSource("field", "value1") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) @@ -373,7 +378,7 @@ public void testRequiredRoutingCrudApis() throws Exception { logger.info("--> indexing with id [1], with no routing, should fail"); try { - client().prepareIndex(indexOrAlias(), "type1", "1").setSource("field", "value1").get(); + client().prepareIndex(indexOrAlias()).setId("1").setSource("field", "value1").get(); fail("index with missing routing when routing is required should fail"); } catch (OpenSearchException e) { assertThat(e.unwrapCause(), instanceOf(RoutingMissingException.class)); @@ -386,7 +391,7 @@ public void testRequiredRoutingCrudApis() throws Exception { logger.info("--> deleting with no routing, should fail"); try { - client().prepareDelete(indexOrAlias(), "type1", "1").get(); + client().prepareDelete(indexOrAlias(), "1").get(); fail("delete with missing routing when routing is required should fail"); } catch (OpenSearchException e) { assertThat(e.unwrapCause(), instanceOf(RoutingMissingException.class)); @@ -404,19 +409,13 @@ public void testRequiredRoutingCrudApis() throws Exception { } try { - client().prepareUpdate(indexOrAlias(), "type1", "1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") - .execute() - .actionGet(); + client().prepareUpdate(indexOrAlias(), "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2").execute().actionGet(); fail("update with missing routing when routing is required should fail"); } catch (OpenSearchException e) { assertThat(e.unwrapCause(), instanceOf(RoutingMissingException.class)); } - client().prepareUpdate(indexOrAlias(), "type1", "1") - .setRouting(routingValue) - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") - .get(); + client().prepareUpdate(indexOrAlias(), "1").setRouting(routingValue).setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2").get(); client().admin().indices().prepareRefresh().execute().actionGet(); for (int i = 0; i < 5; i++) { @@ -432,7 +431,7 @@ public void testRequiredRoutingCrudApis() throws Exception { assertThat(getResponse.getSourceAsMap().get("field"), equalTo("value2")); } - client().prepareDelete(indexOrAlias(), "type1", "1").setRouting(routingValue).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareDelete(indexOrAlias(), "1").setRouting(routingValue).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { try { @@ -561,9 +560,10 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { ensureGreen(); String routingValue = findNonMatchingRoutingValue("test", "1"); logger.info("--> indexing with id 
[1], and routing [{}]", routingValue); - client().prepareIndex(indexOrAlias(), "type1", "1").setRouting(routingValue).setSource("field", "value1").get(); + client().prepareIndex(indexOrAlias()).setId("1").setRouting(routingValue).setSource("field", "value1").get(); logger.info("--> indexing with id [2], and routing [{}]", routingValue); - client().prepareIndex(indexOrAlias(), "type1", "2") + client().prepareIndex(indexOrAlias()) + .setId("2") .setRouting(routingValue) .setSource("field", "value2") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) @@ -608,7 +608,7 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]")); } - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setRouting(routingValue) .setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value1") .get(); @@ -616,7 +616,7 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { assertThat(updateResponse.getVersion(), equalTo(2L)); try { - client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value1").get(); + client().prepareUpdate(indexOrAlias(), "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value1").get(); fail(); } catch (RoutingMissingException e) { assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]")); diff --git a/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java index 8c4714aaf91f9..b4823bb482bfa 100644 --- a/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java @@ -90,17 +90,14 @@ protected Collection> nodePlugins() { } public void testPainlessCompilationLimit429Error() throws Exception { - client().prepareIndex("test", "1") - .setId("1") - .setSource(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) - .get(); + client().prepareIndex("test").setId("1").setSource(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()).get(); ensureGreen(); Map params = new HashMap<>(); params.put("field", "field"); Script script = new Script(ScriptType.INLINE, "mockscript", "increase_field", params); ExecutionException exception = expectThrows( ExecutionException.class, - () -> client().prepareUpdate("test", "1", "1").setScript(script).execute().get() + () -> client().prepareUpdate("test", "1").setScript(script).execute().get() ); Throwable rootCause = getRootCause(exception); assertTrue(rootCause instanceof OpenSearchException); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java index 37e7e10968983..da5698918cf99 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java @@ -113,7 +113,7 @@ private void indexTestData() { // Make sure we have a few segments BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); for (int j = 0; j < 20; j++) { - bulkRequestBuilder.add(client().prepareIndex("test", "type", Integer.toString(i * 5 + j)).setSource("field", "value")); + 
bulkRequestBuilder.add(client().prepareIndex("test").setId(Integer.toString(i * 5 + j)).setSource("field", "value")); } assertNoFailures(bulkRequestBuilder.get()); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java index 3c0b948adfb9b..049dcb50024ba 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java @@ -68,7 +68,7 @@ protected Settings nodeSettings(int nodeOrdinal) { public void testSimpleTimeout() throws Exception { for (int i = 0; i < 32; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value").get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").get(); } refresh("test"); @@ -81,7 +81,7 @@ public void testSimpleTimeout() throws Exception { } public void testPartialResultsIntolerantTimeout() throws Exception { - client().prepareIndex("test", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); OpenSearchException ex = expectThrows( OpenSearchException.class, diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java index 15892b6378d42..30e6aa4cd31fc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java @@ -61,7 +61,7 @@ public void testOpenContextsAfterRejections() throws Exception { ensureGreen("test"); final int docs = scaledRandomIntBetween(20, 50); for (int i = 0; i < docs; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value").get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").get(); } IndicesStatsResponse indicesStats = client().admin().indices().prepareStats().get(); assertThat(indicesStats.getTotal().getSearch().getOpenContexts(), equalTo(0L)); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java index 03a5c4d71da4d..96f3f710e4b7d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java @@ -63,7 +63,7 @@ public void testStressReaper() throws ExecutionException, InterruptedException { int num = randomIntBetween(100, 150); IndexRequestBuilder[] builders = new IndexRequestBuilder[num]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type", "" + i).setSource("f", English.intToEnglish(i)); + builders[i] = client().prepareIndex("test").setId("" + i).setSource("f", English.intToEnglish(i)); } createIndex("test"); indexRandom(true, builders); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java index 3af781d69efdc..303b84151cf3e 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java @@ -56,7 +56,7 @@ public void setupSuiteScopeCluster() throws Exception { numDocs = randomIntBetween(1, 20); List docs = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { - docs.add(client().prepareIndex("index", "type").setSource("f", Integer.toString(i / 3))); + docs.add(client().prepareIndex("index").setSource("f", Integer.toString(i / 3))); } indexRandom(true, docs); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java index a786a59a5a9c1..f49938fb27e72 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java @@ -73,11 +73,11 @@ public void testMultipleAggsOnSameField_WithDifferentRequiredValueSourceType() t String name = "name_" + randomIntBetween(1, 10); if (rarely()) { missingValues++; - builders[i] = client().prepareIndex("idx", "type").setSource(jsonBuilder().startObject().field("name", name).endObject()); + builders[i] = client().prepareIndex("idx").setSource(jsonBuilder().startObject().field("name", name).endObject()); } else { int value = randomIntBetween(1, 10); values.put(value, values.getOrDefault(value, 0) + 1); - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") .setSource(jsonBuilder().startObject().field("name", name).field("value", value).endObject()); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java index 96cec5257ada8..29c325d01492e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java @@ -152,7 +152,7 @@ public void testRandomRanges() throws Exception { source = source.value(docs[i][j]); } source = source.endArray().endObject(); - client().prepareIndex("idx", "type").setSource(source).get(); + client().prepareIndex("idx").setSource(source).get(); } assertNoFailures(client().admin().indices().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).get()); @@ -283,7 +283,7 @@ public void testDuelTerms() throws Exception { source = source.value(Integer.toString(values[j])); } source = source.endArray().endObject(); - indexingRequests.add(client().prepareIndex("idx", "type").setSource(source)); + indexingRequests.add(client().prepareIndex("idx").setSource(source)); } indexRandom(true, indexingRequests); @@ -387,7 +387,7 @@ public void testDuelTermsHistogram() throws Exception { source = source.value(randomFrom(values)); } source = source.endArray().endObject(); - client().prepareIndex("idx", "type").setSource(source).get(); + client().prepareIndex("idx").setSource(source).get(); } assertNoFailures( client().admin().indices().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get() @@ -439,7 +439,7 @@ public void testLargeNumbersOfPercentileBuckets() throws Exception { logger.info("Indexing [{}] docs", numDocs); List indexingRequests = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { - 
indexingRequests.add(client().prepareIndex("idx", "type", Integer.toString(i)).setSource("double_value", randomDouble())); + indexingRequests.add(client().prepareIndex("idx").setId(Integer.toString(i)).setSource("double_value", randomDouble())); } indexRandom(true, indexingRequests); @@ -458,7 +458,7 @@ public void testLargeNumbersOfPercentileBuckets() throws Exception { public void testReduce() throws Exception { createIndex("idx"); final int value = randomIntBetween(0, 10); - indexRandom(true, client().prepareIndex("idx", "type").setSource("f", value)); + indexRandom(true, client().prepareIndex("idx").setSource("f", value)); SearchResponse response = client().prepareSearch("idx") .addAggregation( filter("filter", QueryBuilders.matchAllQuery()).subAggregation( @@ -518,7 +518,7 @@ public void testDuelDepthBreadthFirst() throws Exception { final int v1 = randomInt(1 << randomInt(7)); final int v2 = randomInt(1 << randomInt(7)); final int v3 = randomInt(1 << randomInt(7)); - reqs.add(client().prepareIndex("idx", "type").setSource("f1", v1, "f2", v2, "f3", v3)); + reqs.add(client().prepareIndex("idx").setSource("f1", v1, "f2", v2, "f3", v3)); } indexRandom(true, reqs); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java index 5086468b6a673..3b8431c50a3ee 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java @@ -52,9 +52,9 @@ public class FiltersAggsRewriteIT extends OpenSearchSingleNodeTestCase { public void testWrapperQueryIsRewritten() throws IOException { createIndex("test", Settings.EMPTY, "test", "title", "type=text"); - client().prepareIndex("test", "test", "1").setSource("title", "foo bar baz").get(); - client().prepareIndex("test", "test", "2").setSource("title", "foo foo foo").get(); - client().prepareIndex("test", "test", "3").setSource("title", "bar baz bax").get(); + client().prepareIndex("test").setId("1").setSource("title", "foo bar baz").get(); + client().prepareIndex("test").setId("2").setSource("title", "foo foo foo").get(); + client().prepareIndex("test").setId("3").setSource("title", "bar baz bax").get(); client().admin().indices().prepareRefresh("test").get(); XContentType xContentType = randomFrom(XContentType.values()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java index e1c339c38d1da..c00152a54bd37 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java @@ -57,7 +57,7 @@ public void testMetadataSetOnAggregationResult() throws Exception { IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)]; for (int i = 0; i < builders.length; i++) { String name = "name_" + randomIntBetween(1, 10); - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") .setSource(jsonBuilder().startObject().field("name", name).field("value", randomInt()).endObject()); } indexRandom(true, builders); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java index a2831d9929f8b..9135ca0f0a364 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java @@ -72,8 +72,9 @@ protected void setupSuiteScopeCluster() throws Exception { ); indexRandom( true, - client().prepareIndex("idx", "type", "1").setSource(), - client().prepareIndex("idx", "type", "2") + client().prepareIndex("idx").setId("1").setSource(), + client().prepareIndex("idx") + .setId("2") .setSource("str", "foo", "long", 3L, "double", 5.5, "date", "2015-05-07", "location", "1,2") ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java index 0298a39ac37fa..5d54359152816 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java @@ -92,19 +92,19 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numTag1Docs; i++) { numSingleTag1Docs++; XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).field("tag", "tag1").endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { // randomly index the document twice so that we have deleted // docs that match the filter - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } for (int i = numTag1Docs; i < (numTag1Docs + numTag2Docs); i++) { numSingleTag2Docs++; XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).field("tag", "tag2").endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } for (int i = numTag1Docs + numTag2Docs; i < numDocs; i++) { @@ -112,15 +112,16 @@ public void setupSuiteScopeCluster() throws Exception { numTag1Docs++; numTag2Docs++; XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).array("tag", "tag1", "tag2").endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java index ee406fd94d149..fc5407c4cade8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java @@ -86,7 +86,7 @@ public void setupSuiteScopeCluster() throws Exception { default: throw new AssertionError(); } - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, singleValue) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java index 9c7e2be8b9121..971afdd20e1fa 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java @@ -113,7 +113,7 @@ private static String format(ZonedDateTime date, String pattern) { } private IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception { - return client().prepareIndex(idx, "type") + return client().prepareIndex(idx) .setSource( jsonBuilder().startObject() .timeField("date", date) @@ -127,7 +127,7 @@ private IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) } private IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { - return client().prepareIndex("idx", "type") + return client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("value", value) @@ -149,7 +149,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } @@ -191,44 +192,44 @@ private void getMultiSortDocs(List builders) throws IOExcep assertAcked(client().admin().indices().prepareCreate("sort_idx").addMapping("type", "date", "type=date").get()); for (int i = 1; i <= 3; i++) { builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 1)).field("l", 1).field("d", i).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 2)).field("l", 2).field("d", i).endObject()) ); } builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 3)).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 3).plusHours(1)).field("l", 3).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 4)).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") 
.setSource(jsonBuilder().startObject().timeField("date", date(1, 4).plusHours(2)).field("l", 3).field("d", 3).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 5)).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 5).plusHours(12)).field("l", 5).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 6)).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 7)).field("l", 5).field("d", 1).endObject()) ); } @@ -1042,7 +1043,8 @@ public void testSingleValueWithTimeZone() throws Exception { IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; ZonedDateTime date = date("2014-03-11T00:00:00+00:00"); for (int i = 0; i < reqs.length; i++) { - reqs[i] = client().prepareIndex("idx2", "type", "" + i) + reqs[i] = client().prepareIndex("idx2") + .setId("" + i) .setSource(jsonBuilder().startObject().timeField("date", date).endObject()); date = date.plusHours(1); } @@ -1327,7 +1329,8 @@ public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception prepareCreate("idx2").addMapping("type", mappingJson, XContentType.JSON).get(); IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; for (int i = 0; i < reqs.length; i++) { - reqs[i] = client().prepareIndex("idx2", "type", "" + i) + reqs[i] = client().prepareIndex("idx2") + .setId("" + i) .setSource(jsonBuilder().startObject().field("date", "10-03-2014").endObject()); } indexRandom(true, reqs); @@ -1397,8 +1400,8 @@ public void testDSTBoundaryIssue9491() throws InterruptedException, ExecutionExc assertAcked(client().admin().indices().prepareCreate("test9491").addMapping("type", "d", "type=date").get()); indexRandom( true, - client().prepareIndex("test9491", "type").setSource("d", "2014-10-08T13:00:00Z"), - client().prepareIndex("test9491", "type").setSource("d", "2014-11-08T13:00:00Z") + client().prepareIndex("test9491").setSource("d", "2014-10-08T13:00:00Z"), + client().prepareIndex("test9491").setSource("d", "2014-11-08T13:00:00Z") ); ensureSearchable("test9491"); SearchResponse response = client().prepareSearch("test9491") @@ -1420,9 +1423,9 @@ public void testIssue8209() throws InterruptedException, ExecutionException { assertAcked(client().admin().indices().prepareCreate("test8209").addMapping("type", "d", "type=date").get()); indexRandom( true, - client().prepareIndex("test8209", "type").setSource("d", "2014-01-01T00:00:00Z"), - client().prepareIndex("test8209", "type").setSource("d", "2014-04-01T00:00:00Z"), - client().prepareIndex("test8209", "type").setSource("d", "2014-04-30T00:00:00Z") + client().prepareIndex("test8209").setSource("d", "2014-01-01T00:00:00Z"), + client().prepareIndex("test8209").setSource("d", "2014-04-01T00:00:00Z"), + client().prepareIndex("test8209").setSource("d", "2014-04-30T00:00:00Z") ); ensureSearchable("test8209"); SearchResponse response = client().prepareSearch("test8209") @@ -1471,7 +1474,7 @@ public void testExceptionOnNegativeInterval() { */ public void testFormatIndexUnmapped() throws InterruptedException, ExecutionException { String 
indexDateUnmapped = "test31760"; - indexRandom(true, client().prepareIndex(indexDateUnmapped, "_doc").setSource("foo", "bar")); + indexRandom(true, client().prepareIndex(indexDateUnmapped).setSource("foo", "bar")); ensureSearchable(indexDateUnmapped); SearchResponse response = client().prepareSearch(indexDateUnmapped) @@ -1499,7 +1502,7 @@ public void testFormatIndexUnmapped() throws InterruptedException, ExecutionExce public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, ExecutionException { String index = "test31392"; assertAcked(client().admin().indices().prepareCreate(index).addMapping("type", "d", "type=date,format=epoch_millis").get()); - indexRandom(true, client().prepareIndex(index, "type").setSource("d", "1477954800000")); + indexRandom(true, client().prepareIndex(index).setSource("d", "1477954800000")); ensureSearchable(index); SearchResponse response = client().prepareSearch(index) .addAggregation( @@ -1616,8 +1619,8 @@ public void testScriptCaching() throws Exception { String date2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(2, 1)); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("d", date), - client().prepareIndex("cache_test_idx", "type", "2").setSource("d", date2) + client().prepareIndex("cache_test_idx").setId("1").setSource("d", date), + client().prepareIndex("cache_test_idx").setId("2").setSource("d", date2) ); // Make sure we are starting with a clear cache @@ -1829,8 +1832,8 @@ private ZonedDateTime key(Histogram.Bucket bucket) { */ public void testDateNanosHistogram() throws Exception { assertAcked(prepareCreate("nanos").addMapping("_doc", "date", "type=date_nanos").get()); - indexRandom(true, client().prepareIndex("nanos", "_doc", "1").setSource("date", "2000-01-01")); - indexRandom(true, client().prepareIndex("nanos", "_doc", "2").setSource("date", "2000-01-02")); + indexRandom(true, client().prepareIndex("nanos").setId("1").setSource("date", "2000-01-01")); + indexRandom(true, client().prepareIndex("nanos").setId("2").setSource("date", "2000-01-02")); // Search interval 24 hours SearchResponse r = client().prepareSearch("nanos") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 5334709e60cd5..2505cb48245c3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -85,7 +85,8 @@ private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, i IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours]; for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) { - reqs[i - idxIdStart] = client().prepareIndex("idx2", "type", "" + i) + reqs[i - idxIdStart] = client().prepareIndex("idx2") + .setId("" + i) .setSource(jsonBuilder().startObject().timeField("date", date).endObject()); date = date.plusHours(stepSizeHours); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java index 9c6ba1495a89c..7a28df00980cc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java @@ -79,7 +79,7 @@ public class DateRangeIT extends OpenSearchIntegTestCase { private static IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { - return client().prepareIndex("idx", "type") + return client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("value", value) @@ -128,7 +128,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer")); for (int i = 0; i < 2; i++) { docs.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } @@ -918,9 +919,11 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1") + client().prepareIndex("cache_test_idx") + .setId("1") .setSource(jsonBuilder().startObject().timeField("date", date(1, 1)).endObject()), - client().prepareIndex("cache_test_idx", "type", "2") + client().prepareIndex("cache_test_idx") + .setId("2") .setSource(jsonBuilder().startObject().timeField("date", date(2, 1)).endObject()) ); @@ -1070,9 +1073,9 @@ public void testRangeWithFormatStringValue() throws Exception { assertAcked(prepareCreate(indexName).addMapping("type", "date", "type=date,format=strict_hour_minute_second")); indexRandom( true, - client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", "00:16:40").endObject()), - client().prepareIndex(indexName, "type", "2").setSource(jsonBuilder().startObject().field("date", "00:33:20").endObject()), - client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", "00:50:00").endObject()) + client().prepareIndex(indexName).setId("1").setSource(jsonBuilder().startObject().field("date", "00:16:40").endObject()), + client().prepareIndex(indexName).setId("2").setSource(jsonBuilder().startObject().field("date", "00:33:20").endObject()), + client().prepareIndex(indexName).setId("3").setSource(jsonBuilder().startObject().field("date", "00:50:00").endObject()) ); // using no format should work when to/from is compatible with format in @@ -1132,9 +1135,9 @@ public void testRangeWithFormatNumericValue() throws Exception { assertAcked(prepareCreate(indexName).addMapping("type", "date", "type=date,format=epoch_second")); indexRandom( true, - client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", 1002).endObject()), - client().prepareIndex(indexName, "type", "2").setSource(jsonBuilder().startObject().field("date", 2000).endObject()), - client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", 3008).endObject()) + client().prepareIndex(indexName).setId("1").setSource(jsonBuilder().startObject().field("date", 1002).endObject()), + client().prepareIndex(indexName).setId("2").setSource(jsonBuilder().startObject().field("date", 2000).endObject()), + client().prepareIndex(indexName).setId("3").setSource(jsonBuilder().startObject().field("date", 3008).endObject()) ); // using no format should work when to/from is compatible with format in diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java index 39ddac46c8220..aa4bb671d14e8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java @@ -106,10 +106,12 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < data.length; i++) { String[] parts = data[i].split(","); - client().prepareIndex("test", "book", "" + i) + client().prepareIndex("test") + .setId("" + i) .setSource("author", parts[5], "name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) .get(); - client().prepareIndex("idx_unmapped_author", "book", "" + i) + client().prepareIndex("idx_unmapped_author") + .setId("" + i) .setSource("name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) .get(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java index 58fce68b12a5a..6c6e6ccc679e8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java @@ -147,7 +147,7 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < NUM_DOCS; i++) { builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, (double) i) @@ -164,7 +164,7 @@ public void setupSuiteScopeCluster() throws Exception { } for (int i = 0; i < 100; i++) { builders.add( - client().prepareIndex("high_card_idx", "type") + client().prepareIndex("high_card_idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, (double) i) @@ -181,7 +181,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } @@ -240,44 +241,44 @@ private void getMultiSortDocs(List builders) throws IOExcep assertAcked(prepareCreate("sort_idx").addMapping("multi_sort_type", SINGLE_VALUED_FIELD_NAME, "type=double")); for (int i = 1; i <= 3; i++) { builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 1).field("l", 1).field("d", i).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 2).field("l", 2).field("d", i).endObject()) ); } builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 
3).field("l", 3).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 3).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 6).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 7).field("l", 5).field("d", 1).endObject()) ); } @@ -985,8 +986,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1.5), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2.5) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1.5), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2.5) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java index 5af682ac1b68c..b938db8891d7b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java @@ -73,7 +73,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < numTag1Docs; i++) { builders.add( - client().prepareIndex("idx", "type", "" + i) + client().prepareIndex("idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i + 1).field("tag", "tag1").endObject()) ); } @@ -83,16 +84,17 @@ public void setupSuiteScopeCluster() throws Exception { .field("tag", "tag2") .field("name", "name" + i) .endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { // randomly index the document twice so that we have deleted docs that match the filter - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java index 4ab8e725551af..0b895f32a1259 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java @@ -80,10 +80,10 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < numTag1Docs; i++) { XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).field("tag", "tag1").endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { // randomly index the document twice so that we have deleted docs that match the filter - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } for (int i = numTag1Docs; i < (numTag1Docs + numTag2Docs); i++) { @@ -92,9 +92,9 @@ public void setupSuiteScopeCluster() throws Exception { .field("tag", "tag2") .field("name", "name" + i) .endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } for (int i = numTag1Docs + numTag2Docs; i < numDocs; i++) { @@ -104,15 +104,16 @@ public void setupSuiteScopeCluster() throws Exception { .field("tag", "tag3") .field("name", "name" + i) .endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java index e11eca4690234..a2d6533ae0afb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java @@ -87,7 +87,7 @@ private IndexRequestBuilder indexCity(String idx, String name, String... 
latLons } source.endArray(); source = source.endObject(); - return client().prepareIndex(idx, "type").setSource(source); + return client().prepareIndex(idx).setSource(source); } @Override @@ -142,7 +142,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).field("location", "52.0945, 5.116").endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java index 3331748d48fd5..c7c21c203af61 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java @@ -88,7 +88,7 @@ private static IndexRequestBuilder indexCity(String index, String name, List builders) throws IOExcep assertAcked(client().admin().indices().prepareCreate("sort_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=double").get()); for (int i = 1; i <= 3; i++) { builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 1).field("l", 1).field("d", i).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 2).field("l", 2).field("d", i).endObject()) ); } builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3.8).field("l", 3).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4.4).field("l", 3).field("d", 3).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5.1).field("l", 5).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 6).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 7).field("l", 5).field("d", 1).endObject()) ); } @@ -1126,8 +1127,8 @@ public void testDecimalIntervalAndOffset() throws Exception { 
assertAcked(prepareCreate("decimal_values").addMapping("type", "d", "type=float").get()); indexRandom( true, - client().prepareIndex("decimal_values", "type", "1").setSource("d", -0.6), - client().prepareIndex("decimal_values", "type", "2").setSource("d", 0.1) + client().prepareIndex("decimal_values").setId("1").setSource("d", -0.6), + client().prepareIndex("decimal_values").setId("2").setSource("d", 0.1) ); SearchResponse r = client().prepareSearch("decimal_values") @@ -1156,8 +1157,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("d", -0.6), - client().prepareIndex("cache_test_idx", "type", "2").setSource("d", 0.1) + client().prepareIndex("cache_test_idx").setId("1").setSource("d", -0.6), + client().prepareIndex("cache_test_idx").setId("2").setSource("d", 0.1) ); // Make sure we are starting with a clear cache @@ -1351,9 +1352,9 @@ public void testHardBounds() throws Exception { assertAcked(prepareCreate("test").addMapping("type", "d", "type=double").get()); indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("d", -0.6), - client().prepareIndex("test", "type", "2").setSource("d", 0.5), - client().prepareIndex("test", "type", "3").setSource("d", 0.1) + client().prepareIndex("test").setId("1").setSource("d", -0.6), + client().prepareIndex("test").setId("2").setSource("d", 0.5), + client().prepareIndex("test").setId("3").setSource("d", 0.1) ); SearchResponse r = client().prepareSearch("test") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java index a13017b130c4e..b768631225b90 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java @@ -75,9 +75,10 @@ public void setupSuiteScopeCluster() throws Exception { indexRandom( true, - client().prepareIndex("idx", "type", "1").setSource("ip", "192.168.1.7", "ips", Arrays.asList("192.168.0.13", "192.168.1.2")), - client().prepareIndex("idx", "type", "2").setSource("ip", "192.168.1.10", "ips", Arrays.asList("192.168.1.25", "192.168.1.28")), - client().prepareIndex("idx", "type", "3") + client().prepareIndex("idx").setId("1").setSource("ip", "192.168.1.7", "ips", Arrays.asList("192.168.0.13", "192.168.1.2")), + client().prepareIndex("idx").setId("2").setSource("ip", "192.168.1.10", "ips", Arrays.asList("192.168.1.25", "192.168.1.28")), + client().prepareIndex("idx") + .setId("3") .setSource("ip", "2001:db8::ff00:42:8329", "ips", Arrays.asList("2001:db8::ff00:42:8329", "2001:db8::ff00:42:8380")) ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java index 6d5e75f613649..53ff70dd240d1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java @@ -79,9 +79,9 @@ public void testScriptValue() throws Exception { assertAcked(prepareCreate("index").addMapping("type", "ip", "type=ip")); indexRandom( true, - client().prepareIndex("index", "type", "1").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "2").setSource("ip", "192.168.1.7"), - 
client().prepareIndex("index", "type", "3").setSource("ip", "2001:db8::2:1") + client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("2").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("3").setSource("ip", "2001:db8::2:1") ); Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['ip'].value", Collections.emptyMap()); @@ -107,9 +107,9 @@ public void testScriptValues() throws Exception { assertAcked(prepareCreate("index").addMapping("type", "ip", "type=ip")); indexRandom( true, - client().prepareIndex("index", "type", "1").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "2").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "3").setSource("ip", "2001:db8::2:1") + client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("2").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("3").setSource("ip", "2001:db8::2:1") ); Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['ip']", Collections.emptyMap()); @@ -135,10 +135,10 @@ public void testMissingValue() throws Exception { assertAcked(prepareCreate("index").addMapping("type", "ip", "type=ip")); indexRandom( true, - client().prepareIndex("index", "type", "1").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "2").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "3").setSource("ip", "127.0.0.1"), - client().prepareIndex("index", "type", "4").setSource("not_ip", "something") + client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("2").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("3").setSource("ip", "127.0.0.1"), + client().prepareIndex("index").setId("4").setSource("not_ip", "something") ); SearchResponse response = client().prepareSearch("index") .addAggregation(AggregationBuilders.terms("my_terms").field("ip").missing("127.0.0.1").executionHint(randomExecutionHint())) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java index 804b0cae93de7..115b30643ff21 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java @@ -133,7 +133,7 @@ public void setupSuiteScopeCluster() throws Exception { createIndex("idx", "high_card_idx"); IndexRequestBuilder[] lowCardBuilders = new IndexRequestBuilder[NUM_DOCS]; for (int i = 0; i < lowCardBuilders.length; i++) { - lowCardBuilders[i] = client().prepareIndex("idx", "type") + lowCardBuilders[i] = client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, i) @@ -149,7 +149,7 @@ public void setupSuiteScopeCluster() throws Exception { indexRandom(true, lowCardBuilders); IndexRequestBuilder[] highCardBuilders = new IndexRequestBuilder[100]; // TODO randomize the size? 
for (int i = 0; i < highCardBuilders.length; i++) { - highCardBuilders[i] = client().prepareIndex("high_card_idx", "type") + highCardBuilders[i] = client().prepareIndex("high_card_idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, i) @@ -168,7 +168,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } @@ -227,44 +228,44 @@ private void getMultiSortDocs(List builders) throws IOExcep createIndex("sort_idx"); for (int i = 1; i <= 3; i++) { builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 1).field("l", 1).field("d", i).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 2).field("l", 2).field("d", i).endObject()) ); } builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 3).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 6).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 7).field("l", 5).field("d", 1).endObject()) ); } @@ -933,8 +934,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java index c02a5107192a1..47cddbf856090 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java @@ -139,7 +139,7 @@ public void setupSuiteScopeCluster() throws Exception { final int frequency = randomBoolean() ? 1 : randomIntBetween(2, 20); for (int j = 0; j < frequency; ++j) { indexRequests.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("s", stringTerm) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java index de6db070e1a89..f03a3bdeb1716 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java @@ -143,7 +143,7 @@ public void setupSuiteScopeCluster() throws Exception { if (randomBoolean()) { source.field("numeric_value", randomDouble()); } - client().prepareIndex("idx", "type").setSource(source.endObject()).get(); + client().prepareIndex("idx").setSource(source.endObject()).get(); } refresh(); ensureSearchable(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java index 2f03dbbf01c1d..256281f8c6833 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java @@ -123,14 +123,15 @@ public void setupSuiteScopeCluster() throws Exception { source = source.startObject().field("value", i + 1 + j).endObject(); } source = source.endArray().endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i + 1).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i + 1).setSource(source)); } prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer", "nested", "type=nested").get(); ensureGreen("empty_bucket_idx"); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource( jsonBuilder().startObject() .field("value", i * 2) @@ -178,7 +179,8 @@ public void setupSuiteScopeCluster() throws Exception { ensureGreen("idx_nested_nested_aggs"); builders.add( - client().prepareIndex("idx_nested_nested_aggs", "type", "1") + client().prepareIndex("idx_nested_nested_aggs") + .setId("1") .setSource( jsonBuilder().startObject() .startArray("nested1") @@ -458,7 +460,8 @@ public void testParentFilterResolvedCorrectly() throws Exception { List indexRequests = new ArrayList<>(2); indexRequests.add( - client().prepareIndex("idx2", "provider", "1") + client().prepareIndex("idx2") + .setId("1") .setSource( "{\"dates\": {\"month\": {\"label\": \"2014-11\", \"end\": \"2014-11-30\", \"start\": \"2014-11-01\"}, " + "\"day\": \"2014-11-30\"}, \"comments\": [{\"cid\": 3,\"identifier\": \"29111\"}, {\"cid\": 4,\"tags\": [" @@ -467,7 +470,8 @@ public void testParentFilterResolvedCorrectly() throws Exception { ) ); indexRequests.add( - 
client().prepareIndex("idx2", "provider", "2") + client().prepareIndex("idx2") + .setId("2") .setSource( "{\"dates\": {\"month\": {\"label\": \"2014-12\", \"end\": \"2014-12-31\", \"start\": \"2014-12-01\"}, " + "\"day\": \"2014-12-03\"}, \"comments\": [{\"cid\": 1, \"identifier\": \"29111\"}, {\"cid\": 2,\"tags\": [" @@ -544,7 +548,8 @@ public void testNestedSameDocIdProcessedMultipleTime() throws Exception { ); ensureGreen("idx4"); - client().prepareIndex("idx4", "product", "1") + client().prepareIndex("idx4") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "product1") @@ -563,7 +568,8 @@ public void testNestedSameDocIdProcessedMultipleTime() throws Exception { .endObject() ) .get(); - client().prepareIndex("idx4", "product", "2") + client().prepareIndex("idx4") + .setId("2") .setSource( jsonBuilder().startObject() .field("name", "product2") @@ -679,7 +685,8 @@ public void testFilterAggInsideNestedAgg() throws Exception { ) ); - client().prepareIndex("classes", "class", "1") + client().prepareIndex("classes") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "QueryBuilder") @@ -718,7 +725,8 @@ public void testFilterAggInsideNestedAgg() throws Exception { .endObject() ) .get(); - client().prepareIndex("classes", "class", "2") + client().prepareIndex("classes") + .setId("2") .setSource( jsonBuilder().startObject() .field("name", "Document") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java index 1b86f8fec9994..bfbfc53ed7e76 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java @@ -123,7 +123,7 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, i + 1) @@ -139,7 +139,8 @@ public void setupSuiteScopeCluster() throws Exception { prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource( jsonBuilder().startObject() // shift sequence by 1, to ensure we have negative values, and value 3 on the edge of the tested ranges @@ -154,10 +155,10 @@ public void setupSuiteScopeCluster() throws Exception { prepareCreate("old_index").addMapping("_doc", "distance", "type=double", "route_length_miles", "type=alias,path=distance").get(); prepareCreate("new_index").addMapping("_doc", "route_length_miles", "type=double").get(); - builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); - builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 50.5)); - builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); - builders.add(client().prepareIndex("new_index", "_doc").setSource(Collections.emptyMap())); + builders.add(client().prepareIndex("old_index").setSource("distance", 42.0)); + builders.add(client().prepareIndex("old_index").setSource("distance", 50.5)); + builders.add(client().prepareIndex("new_index").setSource("route_length_miles", 100.2)); + 
builders.add(client().prepareIndex("new_index").setSource(Collections.emptyMap())); indexRandom(true, builders); ensureSearchable(); @@ -936,8 +937,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource(jsonBuilder().startObject().field("i", 1).endObject()), - client().prepareIndex("cache_test_idx", "type", "2").setSource(jsonBuilder().startObject().field("i", 2).endObject()) + client().prepareIndex("cache_test_idx").setId("1").setSource(jsonBuilder().startObject().field("i", 1).endObject()), + client().prepareIndex("cache_test_idx").setId("2").setSource(jsonBuilder().startObject().field("i", 2).endObject()) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java index fc60620345d0f..e8a57ea3941ff 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java @@ -156,7 +156,7 @@ private void insertIdx1(List values1, List values2) throws Excep source.startObject().field("field2", value1).endObject(); } source.endArray().endObject(); - indexRandom(false, client().prepareIndex("idx1", "type").setRouting("1").setSource(source)); + indexRandom(false, client().prepareIndex("idx1").setRouting("1").setSource(source)); } private void insertIdx2(String[][] values) throws Exception { @@ -169,7 +169,7 @@ private void insertIdx2(String[][] values) throws Exception { source.endArray().endObject(); } source.endArray().endObject(); - indexRandom(false, client().prepareIndex("idx2", "type").setRouting("1").setSource(source)); + indexRandom(false, client().prepareIndex("idx2").setRouting("1").setSource(source)); } public void testSimpleReverseNestedToRoot() throws Exception { @@ -569,7 +569,8 @@ public void testSameParentDocHavingMultipleBuckets() throws Exception { .addMapping("product", mapping) ); - client().prepareIndex("idx3", "product", "1") + client().prepareIndex("idx3") + .setId("1") .setRefreshPolicy(IMMEDIATE) .setSource( jsonBuilder().startObject() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java index 89bdaa04615dd..94204b6519374 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java @@ -105,10 +105,12 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < data.length; i++) { String[] parts = data[i].split(","); - client().prepareIndex("test", "book", "" + i) + client().prepareIndex("test") + .setId("" + i) .setSource("author", parts[5], "name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) .get(); - client().prepareIndex("idx_unmapped_author", "book", "" + i) + client().prepareIndex("idx_unmapped_author") + .setId("" + i) .setSource("name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) .get(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java index 209da7c978b3c..2300e42b84bbc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java @@ -74,7 +74,7 @@ public class ShardReduceIT extends OpenSearchIntegTestCase { private IndexRequestBuilder indexDoc(String date, int value) throws Exception { - return client().prepareIndex("idx", "type") + return client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("value", value) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 0046dbbd66e44..cbcc9c396fc06 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -40,6 +40,7 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; @@ -92,7 +93,6 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase { static final String INDEX_NAME = "testidx"; - static final String DOC_TYPE = "_doc"; static final String TEXT_FIELD = "text"; static final String CLASS_FIELD = "class"; @@ -222,10 +222,10 @@ public void testPopularTermManyDeletedDocs() throws Exception { String[] cat2v1 = { "constant", "two" }; String[] cat2v2 = { "constant", "duo" }; List indexRequestBuilderList = new ArrayList<>(); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1").setSource(TEXT_FIELD, cat1v1, CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2").setSource(TEXT_FIELD, cat1v2, CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3").setSource(TEXT_FIELD, cat2v1, CLASS_FIELD, "2")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4").setSource(TEXT_FIELD, cat2v2, CLASS_FIELD, "2")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("1").setSource(TEXT_FIELD, cat1v1, CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("2").setSource(TEXT_FIELD, cat1v2, CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("3").setSource(TEXT_FIELD, cat2v1, CLASS_FIELD, "2")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("4").setSource(TEXT_FIELD, cat2v2, CLASS_FIELD, "2")); indexRandom(true, false, indexRequestBuilderList); // Now create some holes in the index with selective deletes caused by updates. @@ -236,7 +236,7 @@ public void testPopularTermManyDeletedDocs() throws Exception { indexRequestBuilderList.clear(); for (int i = 0; i < 50; i++) { text = text == cat1v2 ? 
cat1v1 : cat1v2; - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1").setSource(TEXT_FIELD, text, CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("1").setSource(TEXT_FIELD, text, CLASS_FIELD, "1")); } indexRandom(true, false, indexRequestBuilderList); @@ -479,7 +479,7 @@ private void indexEqualTestData() throws ExecutionException, InterruptedExceptio List indexRequestBuilders = new ArrayList<>(); for (int i = 0; i < data.length; i++) { String[] parts = data[i].split("\t"); - indexRequestBuilders.add(client().prepareIndex("test", "_doc", "" + i).setSource("class", parts[0], "text", parts[1])); + indexRequestBuilders.add(client().prepareIndex("test").setId("" + i).setSource("class", parts[0], "text", parts[1])); } indexRandom(true, false, indexRequestBuilders); } @@ -545,7 +545,9 @@ private void indexRandomFrequencies01(String type) throws ExecutionException, In if (type.equals("text")) { textMappings += ",fielddata=true"; } - assertAcked(prepareCreate(INDEX_NAME).addMapping(DOC_TYPE, TEXT_FIELD, textMappings, CLASS_FIELD, "type=keyword")); + assertAcked( + prepareCreate(INDEX_NAME).addMapping(MapperService.SINGLE_MAPPING_NAME, TEXT_FIELD, textMappings, CLASS_FIELD, "type=keyword") + ); String[] gb = { "0", "1" }; List indexRequestBuilderList = new ArrayList<>(); for (int i = 0; i < randomInt(20); i++) { @@ -557,7 +559,7 @@ private void indexRandomFrequencies01(String type) throws ExecutionException, In text[0] = gb[randNum]; } indexRequestBuilderList.add( - client().prepareIndex(INDEX_NAME, DOC_TYPE).setSource(TEXT_FIELD, text, CLASS_FIELD, randomBoolean() ? "one" : "zero") + client().prepareIndex(INDEX_NAME).setSource(TEXT_FIELD, text, CLASS_FIELD, randomBoolean() ? "one" : "zero") ); } indexRandom(true, indexRequestBuilderList); @@ -579,8 +581,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java index 789b1ced7ffae..c21f78c5e942d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java @@ -81,7 +81,8 @@ public void setupSuiteScopeCluster() throws Exception { int numUniqueTerms = between(2, numDocs / 2); for (int i = 0; i < numDocs; i++) { builders.add( - client().prepareIndex("idx", "type", "" + i) + client().prepareIndex("idx") + .setId("" + i) .setSource( jsonBuilder().startObject() .field(STRING_FIELD_NAME, "val" + randomInt(numUniqueTerms)) @@ -97,7 +98,8 @@ public void setupSuiteScopeCluster() throws Exception { ); for (int i = 0; i < numDocs; i++) { builders.add( - client().prepareIndex("idx_single_shard", "type", "" + i) + client().prepareIndex("idx_single_shard") + .setId("" + i) .setSource( jsonBuilder().startObject() .field(STRING_FIELD_NAME, "val" + randomInt(numUniqueTerms)) @@ -117,7 +119,8 @@ public void setupSuiteScopeCluster() throws Exception { ); for (int i = 
0; i < numDocs; i++) { builders.add( - client().prepareIndex("idx_single_shard", "type", "" + i) + client().prepareIndex("idx_single_shard") + .setId("" + i) .setRouting(String.valueOf(randomInt(numRoutingValues))) .setSource( jsonBuilder().startObject() @@ -147,7 +150,8 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < entry.getValue(); i++) { String term = entry.getKey(); builders.add( - client().prepareIndex("idx_fixed_docs_0", "type", term + "-" + i) + client().prepareIndex("idx_fixed_docs_0") + .setId(term + "-" + i) .setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, term).endObject()) ); } @@ -172,7 +176,8 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < entry.getValue(); i++) { String term = entry.getKey(); builders.add( - client().prepareIndex("idx_fixed_docs_1", "type", term + "-" + i) + client().prepareIndex("idx_fixed_docs_1") + .setId(term + "-" + i) .setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, term).field("shard", 1).endObject()) ); } @@ -195,7 +200,8 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < entry.getValue(); i++) { String term = entry.getKey(); builders.add( - client().prepareIndex("idx_fixed_docs_2", "type", term + "-" + i) + client().prepareIndex("idx_fixed_docs_2") + .setId(term + "-" + i) .setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, term).field("shard", 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index 7d3a6ef7461a8..af006210326d8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -35,6 +35,7 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.bucket.filter.InternalFilter; @@ -57,7 +58,6 @@ public class TermsShardMinDocCountIT extends OpenSearchIntegTestCase { private static final String index = "someindex"; - private static final String type = "testtype"; private static String randomExecutionHint() { return randomBoolean() ? 
null : randomFrom(SignificantTermsAggregatorFactory.ExecutionMode.values()).toString(); @@ -73,7 +73,7 @@ public void testShardMinDocCountSignificantTermsTest() throws Exception { } assertAcked( prepareCreate(index).setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .addMapping(type, "text", textMappings) + .addMapping(MapperService.SINGLE_MAPPING_NAME, "text", textMappings) ); List indexBuilders = new ArrayList<>(); @@ -125,10 +125,10 @@ private void addTermsDocs(String term, int numInClass, int numNotInClass, List indexBuilders = new ArrayList<>(); @@ -189,7 +189,7 @@ public void testShardMinDocCountTermsTest() throws Exception { private static void addTermsDocs(String term, int numDocs, List builders) { String sourceClass = "{\"text\": \"" + term + "\"}"; for (int i = 0; i < numDocs; i++) { - builders.add(client().prepareIndex(index, type).setSource(sourceClass, XContentType.JSON)); + builders.add(client().prepareIndex(index).setSource(sourceClass, XContentType.JSON)); } } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java index e4604c0a91523..252ffeb4ca0e7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -171,7 +171,7 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 5; i++) { builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, "val" + i) @@ -206,7 +206,7 @@ public void setupSuiteScopeCluster() throws Exception { ); for (int i = 0; i < 100; i++) { builders.add( - client().prepareIndex("high_card_idx", "type") + client().prepareIndex("high_card_idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, "val" + Strings.padStart(i + "", 3, '0')) @@ -222,7 +222,8 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } @@ -293,44 +294,44 @@ private void getMultiSortDocs(List builders) throws IOExcep ); for (int i = 1; i <= 3; i++) { builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val1").field("l", 1).field("d", i).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val2").field("l", 2).field("d", i).endObject()) ); } builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val3").field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val3").field("l", 3).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + 
client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 3).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val6").field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val7").field("l", 5).field("d", 1).endObject()) ); } @@ -1267,8 +1268,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", "foo"), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", "bar") + client().prepareIndex("cache_test_idx").setId("1").setSource("s", "foo"), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", "bar") ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java index 6a1c9fac45ab2..e01f966cadd9b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java @@ -160,7 +160,7 @@ public void setupSuiteScopeCluster() throws Exception { precisionThreshold = randomIntBetween(0, 1 << randomInt(20)); IndexRequestBuilder[] builders = new IndexRequestBuilder[(int) numDocs]; for (int i = 0; i < numDocs; ++i) { - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("str_value", "s" + i) @@ -177,7 +177,7 @@ public void setupSuiteScopeCluster() throws Exception { IndexRequestBuilder[] dummyDocsBuilder = new IndexRequestBuilder[10]; for (int i = 0; i < dummyDocsBuilder.length; i++) { - dummyDocsBuilder[i] = client().prepareIndex("idx", "type").setSource("a_field", "1"); + dummyDocsBuilder[i] = client().prepareIndex("idx").setSource("a_field", "1"); } indexRandom(true, dummyDocsBuilder); @@ -500,8 +500,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java index efebe1b0747a2..e8d425596beb0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java @@ -60,7 +60,7 @@ public void testRequestBreaker() throws Exception { true, IntStream.range(0, randomIntBetween(10, 1000)) .mapToObj( - i -> client().prepareIndex("test", "_doc") + i -> client().prepareIndex("test") .setId("id_" + i) .setSource(org.opensearch.common.collect.Map.of("field0", randomAlphaOfLength(5), "field1", randomAlphaOfLength(5))) ) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java index fec81ec3a64a2..9549aad5399b5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -875,8 +875,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java index f8a7e36455b06..7aa602fff2ee8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -591,8 +591,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java index c75e7e442f3e1..68f8cf6da575a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -560,8 +560,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java index 7bb9492cb4ae7..79f1809fc2f3a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java @@ -117,7 +117,8 @@ public void setupSuiteScopeCluster() throws Exception { multiValueSample[i * 2] = firstMultiValueDatapoint; multiValueSample[(i * 2) + 1] = secondMultiValueDatapoint; - IndexRequestBuilder builder = client().prepareIndex("idx", "_doc", String.valueOf(i)) + IndexRequestBuilder builder = client().prepareIndex("idx") + .setId(String.valueOf(i)) .setSource( jsonBuilder().startObject() .field("value", singleValueDatapoint) @@ -141,7 +142,8 @@ public void setupSuiteScopeCluster() throws Exception { builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", String.valueOf(i)) + client().prepareIndex("empty_bucket_idx") + .setId(String.valueOf(i)) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } @@ -521,8 +523,8 @@ public void testScriptCaching() throws Exception { indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java index 5c9a64965a172..beacf7aa1ccec 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -293,7 +293,8 @@ public void setupSuiteScopeCluster() throws Exception { numDocs = randomIntBetween(10, 100); for (int i = 0; i < numDocs; i++) { builders.add( - client().prepareIndex("idx", "type", "" + i) + client().prepareIndex("idx") + .setId("" + i) .setSource( jsonBuilder().startObject().field("value", randomAlphaOfLengthBetween(5, 15)).field("l_value", i).endObject() ) @@ -313,7 +314,8 @@ public void setupSuiteScopeCluster() throws Exception { builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } @@ -1187,8 +1189,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java index 10005be669a34..27fc26a114cc4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java @@ -264,8 +264,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java index 45322b53109e9..e9b8c91090695 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java @@ -91,9 +91,9 @@ public void setupSuiteScopeCluster() throws Exception { prepareCreate("new_index").addMapping("_doc", "transit_mode", "type=keyword", "route_length_miles", "type=double").get(); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("old_index", "_doc").setSource("transit_mode", "train", "distance", 42.0)); - builders.add(client().prepareIndex("old_index", "_doc").setSource("transit_mode", "bus", "distance", 50.5)); - builders.add(client().prepareIndex("new_index", "_doc").setSource("transit_mode", "train", "route_length_miles", 100.2)); + builders.add(client().prepareIndex("old_index").setSource("transit_mode", "train", "distance", 42.0)); + builders.add(client().prepareIndex("old_index").setSource("transit_mode", "bus", "distance", 50.5)); + builders.add(client().prepareIndex("new_index").setSource("transit_mode", "train", "route_length_miles", 100.2)); indexRandom(true, builders); ensureSearchable(); @@ -242,8 +242,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 68acc61befb54..8b28261f7f00b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -503,8 +503,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java index cc3ea5062499c..2da6ac3f9e586 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -475,8 +475,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java index bb3f62d399444..b6fdcf4b6267d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java @@ -182,7 +182,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 50; i++) { builders.add( - client().prepareIndex("idx", "type", Integer.toString(i)) + client().prepareIndex("idx") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field(TERMS_AGGS_FIELD, "val" + (i / 10)) @@ -196,39 +197,48 @@ public void setupSuiteScopeCluster() throws Exception { } builders.add( - client().prepareIndex("field-collapsing", "type", "1") + client().prepareIndex("field-collapsing") + .setId("1") .setSource(jsonBuilder().startObject().field("group", "a").field("text", "term x y z b").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "2") + client().prepareIndex("field-collapsing") + .setId("2") .setSource(jsonBuilder().startObject().field("group", "a").field("text", "term x y z n rare").field("value", 1).endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "3") + client().prepareIndex("field-collapsing") + .setId("3") .setSource(jsonBuilder().startObject().field("group", "b").field("text", "x y z term").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "4") + client().prepareIndex("field-collapsing") + .setId("4") .setSource(jsonBuilder().startObject().field("group", "b").field("text", "x y term").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "5") + client().prepareIndex("field-collapsing") + .setId("5") .setSource(jsonBuilder().startObject().field("group", "b").field("text", "x term").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "6") + client().prepareIndex("field-collapsing") + .setId("6") .setSource(jsonBuilder().startObject().field("group", "b").field("text", "term rare").field("value", 3).endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "7") + client().prepareIndex("field-collapsing") + .setId("7") .setSource(jsonBuilder().startObject().field("group", "c").field("text", "x y z term").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "8") + 
client().prepareIndex("field-collapsing") + .setId("8") .setSource(jsonBuilder().startObject().field("group", "c").field("text", "x y term b").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "9") + client().prepareIndex("field-collapsing") + .setId("9") .setSource(jsonBuilder().startObject().field("group", "c").field("text", "rare x term").field("value", 2).endObject()) ); @@ -243,11 +253,12 @@ public void setupSuiteScopeCluster() throws Exception { } builder.endArray().endObject(); - builders.add(client().prepareIndex("articles", "article").setSource(builder)); + builders.add(client().prepareIndex("articles").setSource(builder)); } builders.add( - client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "title 1") @@ -290,7 +301,8 @@ public void setupSuiteScopeCluster() throws Exception { ) ); builders.add( - client().prepareIndex("articles", "article", "2") + client().prepareIndex("articles") + .setId("2") .setSource( jsonBuilder().startObject() .field("title", "title 2") @@ -1142,8 +1154,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java index b96efbd335caf..6d3fe1ed3f190 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java @@ -73,7 +73,8 @@ public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); createIndex("idx_unmapped"); for (int i = 0; i < 10; i++) { - client().prepareIndex("idx", "type", "" + i) + client().prepareIndex("idx") + .setId("" + i) .setSource( jsonBuilder().startObject().field("value", i + 1).startArray("values").value(i + 2).value(i + 3).endArray().endObject() ) @@ -243,8 +244,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java index f7994cef4788a..590587185b80e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java @@ -88,7 +88,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") 
.setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -103,7 +103,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java index 3a94d04ef81fb..5de4e5162247d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java @@ -155,7 +155,7 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int docs = 0; docs < numDocs; docs++) { - builders.add(client().prepareIndex("idx", "type").setSource(newDocBuilder())); + builders.add(client().prepareIndex("idx").setSource(newDocBuilder())); } indexRandom(true, builders); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java index 56fe309d5f984..7674679378758 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java @@ -162,12 +162,12 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int docs = 0; docs < numDocs; docs++) { - builders.add(client().prepareIndex("idx", "type").setSource(newDocBuilder())); + builders.add(client().prepareIndex("idx").setSource(newDocBuilder())); } - builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(1, 1, 0, 0))); - builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(1, 2, 0, 0))); - builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(3, 1, 0, 0))); - builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(3, 3, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps").setSource(newDocBuilder(1, 1, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps").setSource(newDocBuilder(1, 2, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps").setSource(newDocBuilder(3, 1, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps").setSource(newDocBuilder(3, 3, 0, 0))); indexRandom(true, builders); ensureSearchable(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java index 4c3b956512a3f..d05740a5a0f36 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java @@ -84,7 +84,6 @@ public void setupSuiteScopeCluster() throws Exception { client().admin() .indices() .preparePutMapping(INDEX) - 
.setType("doc") .setSource("time", "type=date", "foo", "type=keyword", "value_1", "type=float", "value_2", "type=float") .get(); @@ -102,16 +101,16 @@ public void setupSuiteScopeCluster() throws Exception { int termCount = randomIntBetween(3, 6); for (int i = 0; i < termCount; ++i) { builders.add( - client().prepareIndex(INDEX, "doc").setSource(newDocBuilder(time, term, randomIntBetween(1, 10) * randomDouble())) + client().prepareIndex(INDEX).setSource(newDocBuilder(time, term, randomIntBetween(1, 10) * randomDouble())) ); } } time += TimeValue.timeValueHours(1).millis(); } - builders.add(client().prepareIndex(INDEX_WITH_GAPS, "doc").setSource(newDocBuilder(1, "foo", 1.0, 42.0))); - builders.add(client().prepareIndex(INDEX_WITH_GAPS, "doc").setSource(newDocBuilder(2, "foo", null, 42.0))); - builders.add(client().prepareIndex(INDEX_WITH_GAPS, "doc").setSource(newDocBuilder(3, "foo", 3.0, 42.0))); + builders.add(client().prepareIndex(INDEX_WITH_GAPS).setSource(newDocBuilder(1, "foo", 1.0, 42.0))); + builders.add(client().prepareIndex(INDEX_WITH_GAPS).setSource(newDocBuilder(2, "foo", null, 42.0))); + builders.add(client().prepareIndex(INDEX_WITH_GAPS).setSource(newDocBuilder(3, "foo", 3.0, 42.0))); indexRandom(true, builders); ensureSearchable(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java index 0f54e8acae427..d7f16b25a46e8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java @@ -81,12 +81,11 @@ private ZonedDateTime date(int month, int day) { } private static IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception { - return client().prepareIndex(idx, "type") - .setSource(jsonBuilder().startObject().timeField("date", date).field("value", value).endObject()); + return client().prepareIndex(idx).setSource(jsonBuilder().startObject().timeField("date", date).field("value", value).endObject()); } private IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { - return client().prepareIndex("idx", "type") + return client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("value", value) @@ -108,7 +107,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java index 449807fbe096e..cff655e040124 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java @@ -132,7 +132,7 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < numValueBuckets; i++) { for (int docs = 0; docs < valueCounts[i]; docs++) { - builders.add(client().prepareIndex("idx", "type").setSource(newDocBuilder(i * interval))); + 
builders.add(client().prepareIndex("idx").setSource(newDocBuilder(i * interval))); } } @@ -143,7 +143,7 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < valueCounts_empty.length; i++) { for (int docs = 0; docs < valueCounts_empty[i]; docs++) { - builders.add(client().prepareIndex("empty_bucket_idx", "type").setSource(newDocBuilder(i))); + builders.add(client().prepareIndex("empty_bucket_idx").setSource(newDocBuilder(i))); numDocsEmptyIdx++; } } @@ -160,7 +160,7 @@ public void setupSuiteScopeCluster() throws Exception { // make approximately half of the buckets empty if (randomBoolean()) valueCounts_empty_rnd[i] = 0L; for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) { - builders.add(client().prepareIndex("empty_bucket_idx_rnd", "type").setSource(newDocBuilder(i))); + builders.add(client().prepareIndex("empty_bucket_idx_rnd").setSource(newDocBuilder(i))); numDocsEmptyIdx_rnd++; } if (i > 0) { @@ -664,7 +664,7 @@ public void testAvgMovavgDerivNPE() throws Exception { } XContentBuilder doc = jsonBuilder().startObject().field("tick", i).field("value", value).endObject(); - client().prepareIndex("movavg_npe", "type").setSource(doc).get(); + client().prepareIndex("movavg_npe").setSource(doc).get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index 195b1a5c8bd33..4400181eb2226 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -91,7 +91,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -107,7 +107,8 @@ public void setupSuiteScopeCluster() throws Exception { // creates 6 documents where the value of the field is 0, 1, 2, 3, // 3, 5 builders.add( - client().prepareIndex("idx_gappy", "type", "" + i) + client().prepareIndex("idx_gappy") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i == 4 ? 
3 : i).endObject()) ); } @@ -115,7 +116,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java index eb26e8b38f0ec..3d9ebb469cba6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java @@ -102,7 +102,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -117,7 +117,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } @@ -565,7 +566,7 @@ public void testFieldIsntWrittenOutTwice() throws Exception { .field("@timestamp", "2018-07-08T08:07:00.599Z") .endObject(); - client().prepareIndex("foo_2", "doc").setSource(docBuilder).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("foo_2").setSource(docBuilder).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); client().admin().indices().prepareRefresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java index 3d12a36224366..33cc350f10ff1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java @@ -88,7 +88,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -103,7 +103,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java index e3df25fe0e2e6..dc37b49e7a910 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java @@ -175,7 +175,7 @@ public void setupSuiteScopeCluster() throws Exception { for (PipelineAggregationHelperTests.MockBucket mockBucket : mockHisto) { for (double value : mockBucket.docValues) { builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource(jsonBuilder().startObject().field(INTERVAL_FIELD, mockBucket.key).field(VALUE_FIELD, value).endObject()) ); } @@ -183,14 +183,14 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = -10; i < 10; i++) { builders.add( - client().prepareIndex("neg_idx", "type") + client().prepareIndex("neg_idx") .setSource(jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD, 10).endObject()) ); } for (int i = 0; i < 12; i++) { builders.add( - client().prepareIndex("double_predict", "type") + client().prepareIndex("double_predict") .setSource(jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD, 10).endObject()) ); } @@ -1288,7 +1288,7 @@ public void testPredictWithNonEmptyBuckets() throws Exception { for (int i = 0; i < 10; i++) { bulkBuilder.add( - client().prepareIndex("predict_non_empty", "type") + client().prepareIndex("predict_non_empty") .setSource( jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD, 10).field(VALUE_FIELD2, 10).endObject() ) @@ -1297,7 +1297,7 @@ public void testPredictWithNonEmptyBuckets() throws Exception { for (int i = 10; i < 20; i++) { // Extra so there is a bucket that only has second field bulkBuilder.add( - client().prepareIndex("predict_non_empty", "type") + client().prepareIndex("predict_non_empty") .setSource(jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD2, 10).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java index 48e79b3696ecb..6728c9f888aeb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -92,7 +92,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -107,7 +107,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java index 
d4480aefbc767..f5a5d025946ec 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java @@ -168,7 +168,7 @@ public void setupSuiteScopeCluster() throws Exception { for (PipelineAggregationHelperTests.MockBucket mockBucket : mockHisto) { for (double value : mockBucket.docValues) { builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource(jsonBuilder().startObject().field(INTERVAL_FIELD, mockBucket.key).field(VALUE_FIELD, value).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java index c06af8cbb2504..90b0aba10e40a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java @@ -88,7 +88,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -103,7 +103,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java index a7aab44c5cdae..873c43d8b0f4c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java @@ -88,7 +88,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -103,7 +103,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java index fba41f6c04e08..3c2aa6642633e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java @@ -128,7 +128,7 @@ private void buildRedIndex(int numShards) throws Exception { ); ensureGreen(); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", "" + i).setSource("field1", "value1").get(); + client().prepareIndex("test").setId("" + i).setSource("field1", "value1").get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java index 5fac8b143516a..1d8512e101f78 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java @@ -78,7 +78,7 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) if (createIndex) { createIndex("test"); } - client().prepareIndex("test", "type1", id).setSource("field", "test").get(); + client().prepareIndex("test").setId(id).setSource("field", "test").get(); RefreshResponse refreshResponse = client().admin().indices().prepareRefresh("test").get(); // at least one shard should be successful when refreshing assertThat(refreshResponse.getSuccessfulShards(), greaterThanOrEqualTo(1)); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java index 291f4f9ac24cb..fedb6b18d93fb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java @@ -72,7 +72,8 @@ private void testSearchAndRelocateConcurrently(final int numberOfReplicas) throw final int numDocs = between(10, 20); for (int i = 0; i < numDocs; i++) { indexBuilders.add( - client().prepareIndex("test", "type", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("test", "value") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java index 8a6459d4083bb..ed7f764c798e5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java @@ -128,7 +128,8 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe boolean[] added = new boolean[numDocs]; for (int i = 0; i < numDocs; i++) { try { - IndexResponse indexResponse = client().prepareIndex("test", "type", "" + i) + IndexResponse indexResponse = client().prepareIndex("test") + .setId("" + i) .setTimeout(TimeValue.timeValueSeconds(1)) .setSource("test", English.intToEnglish(i)) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java index eeef08ee58baa..f35d07d6d513c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -112,7 +112,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc numInitialDocs = between(10, 100); ensureGreen(); for (int i = 0; i < numInitialDocs; i++) { - client().prepareIndex("test", "type", "init" + i).setSource("test", "init").get(); + client().prepareIndex("test").setId("init" + i).setSource("test", "init").get(); } client().admin().indices().prepareRefresh("test").execute().get(); client().admin().indices().prepareFlush("test").execute().get(); @@ -160,7 +160,8 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc for (int i = 0; i < numDocs; i++) { added[i] = false; try { - IndexResponse indexResponse = client().prepareIndex("test", "type", Integer.toString(i)) + IndexResponse indexResponse = client().prepareIndex("test") + .setId(Integer.toString(i)) .setTimeout(TimeValue.timeValueSeconds(1)) .setSource("test", English.intToEnglish(i)) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/ccs/CrossClusterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/ccs/CrossClusterSearchIT.java index 27eae206ae19a..3258ced753211 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/ccs/CrossClusterSearchIT.java @@ -71,7 +71,7 @@ protected boolean reuseClusters() { private int indexDocs(Client client, String index) { int numDocs = between(1, 10); for (int i = 0; i < numDocs; i++) { - client.prepareIndex(index, "_doc").setSource("f", "v").get(); + client.prepareIndex(index).setSource("f", "v").get(); } client.admin().indices().prepareRefresh(index).get(); return numDocs; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java index 1635608b0b774..b16678d60fce7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java @@ -125,7 +125,8 @@ public void testSimpleNested() throws Exception { List requests = new ArrayList<>(); requests.add( - client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "quick brown fox") @@ -144,7 +145,8 @@ public void testSimpleNested() throws Exception { ) ); requests.add( - client().prepareIndex("articles", "article", "2") + client().prepareIndex("articles") + .setId("2") .setSource( jsonBuilder().startObject() .field("title", "big gray elephant") @@ -261,7 +263,7 @@ public void testRandomNested() throws Exception { source.startObject().field("x", "y").endObject(); } source.endArray().endObject(); - requestBuilders.add(client().prepareIndex("idx", "type", Integer.toString(i)).setSource(source)); + requestBuilders.add(client().prepareIndex("idx").setId(Integer.toString(i)).setSource(source)); } indexRandom(true, requestBuilders); @@ -343,7 +345,8 @@ public void testNestedMultipleLayers() throws Exception { List requests = new ArrayList<>(); requests.add( - client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "quick brown fox") @@ -369,7 +372,8 @@ public void 
testNestedMultipleLayers() throws Exception { ) ); requests.add( - client().prepareIndex("articles", "article", "2") + client().prepareIndex("articles") + .setId("2") .setSource( jsonBuilder().startObject() .field("title", "big gray elephant") @@ -544,7 +548,8 @@ public void testNestedDefinedAsObject() throws Exception { List requests = new ArrayList<>(); requests.add( - client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "quick brown fox") @@ -597,7 +602,8 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { List requests = new ArrayList<>(); requests.add( - client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "quick brown fox") @@ -700,7 +706,8 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { // index the message in an object form instead of an array requests = new ArrayList<>(); requests.add( - client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "quick brown fox") @@ -756,7 +763,8 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { List requests = new ArrayList<>(); int numDocs = randomIntBetween(2, 35); requests.add( - client().prepareIndex("test", "type1", "0") + client().prepareIndex("test") + .setId("0") .setSource( jsonBuilder().startObject() .field("field1", 0) @@ -774,7 +782,8 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { ) ); requests.add( - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", 1) @@ -794,7 +803,8 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { for (int i = 2; i < numDocs; i++) { requests.add( - client().prepareIndex("test", "type1", String.valueOf(i)) + client().prepareIndex("test") + .setId(String.valueOf(i)) .setSource( jsonBuilder().startObject() .field("field1", i) @@ -852,7 +862,8 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { public void testNestedSource() throws Exception { assertAcked(prepareCreate("index1").addMapping("message", "comments", "type=nested")); - client().prepareIndex("index1", "message", "1") + client().prepareIndex("index1") + .setId("1") .setSource( jsonBuilder().startObject() .field("message", "quick brown fox") @@ -947,8 +958,8 @@ public void testNestedSource() throws Exception { public void testInnerHitsWithIgnoreUnmapped() throws Exception { assertAcked(prepareCreate("index1").addMapping("_doc", "nested_type", "type=nested")); createIndex("index2"); - client().prepareIndex("index1", "_doc", "1").setSource("nested_type", Collections.singletonMap("key", "value")).get(); - client().prepareIndex("index2", "type", "3").setSource("key", "value").get(); + client().prepareIndex("index1").setId("1").setSource("nested_type", Collections.singletonMap("key", "value")).get(); + client().prepareIndex("index2").setId("3").setSource("key", "value").get(); refresh(); SearchResponse response = client().prepareSearch("index1", "index2") @@ -971,7 +982,8 @@ public void testUseMaxDocInsteadOfSize() throws Exception { .prepareUpdateSettings("index2") .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), ArrayUtil.MAX_ARRAY_LENGTH)) .get(); - 
client().prepareIndex("index2", "type", "1") + client().prepareIndex("index2") + .setId("1") .setSource( jsonBuilder().startObject().startArray("nested").startObject().field("field", "value1").endObject().endArray().endObject() ) @@ -988,7 +1000,8 @@ public void testUseMaxDocInsteadOfSize() throws Exception { public void testTooHighResultWindow() throws Exception { assertAcked(prepareCreate("index2").addMapping("type", "nested", "type=nested")); - client().prepareIndex("index2", "type", "1") + client().prepareIndex("index2") + .setId("1") .setSource( jsonBuilder().startObject().startArray("nested").startObject().field("field", "value1").endObject().endArray().endObject() ) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java index db3ec0f1232a4..488c253535827 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java @@ -61,9 +61,9 @@ public void testSimpleMatchedQueryFromFilteredQuery() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("name", "test1", "number", 1).get(); - client().prepareIndex("test", "type1", "2").setSource("name", "test2", "number", 2).get(); - client().prepareIndex("test", "type1", "3").setSource("name", "test3", "number", 3).get(); + client().prepareIndex("test").setId("1").setSource("name", "test1", "number", 1).get(); + client().prepareIndex("test").setId("2").setSource("name", "test2", "number", 2).get(); + client().prepareIndex("test").setId("3").setSource("name", "test3", "number", 3).get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -111,9 +111,9 @@ public void testSimpleMatchedQueryFromTopLevelFilter() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("name", "test", "title", "title1").get(); - client().prepareIndex("test", "type1", "2").setSource("name", "test").get(); - client().prepareIndex("test", "type1", "3").setSource("name", "test").get(); + client().prepareIndex("test").setId("1").setSource("name", "test", "title", "title1").get(); + client().prepareIndex("test").setId("2").setSource("name", "test").get(); + client().prepareIndex("test").setId("3").setSource("name", "test").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -162,9 +162,9 @@ public void testSimpleMatchedQueryFromTopLevelFilterAndFilteredQuery() throws Ex createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("name", "test", "title", "title1").get(); - client().prepareIndex("test", "type1", "2").setSource("name", "test", "title", "title2").get(); - client().prepareIndex("test", "type1", "3").setSource("name", "test", "title", "title3").get(); + client().prepareIndex("test").setId("1").setSource("name", "test", "title", "title1").get(); + client().prepareIndex("test").setId("2").setSource("name", "test", "title", "title2").get(); + client().prepareIndex("test").setId("3").setSource("name", "test", "title", "title3").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -202,7 +202,7 @@ public void testRegExpQuerySupportsName() { createIndex("test1"); ensureGreen(); - client().prepareIndex("test1", "type1", "1").setSource("title", "title1").get(); + 
client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -224,7 +224,7 @@ public void testPrefixQuerySupportsName() { createIndex("test1"); ensureGreen(); - client().prepareIndex("test1", "type1", "1").setSource("title", "title1").get(); + client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -246,7 +246,7 @@ public void testFuzzyQuerySupportsName() { createIndex("test1"); ensureGreen(); - client().prepareIndex("test1", "type1", "1").setSource("title", "title1").get(); + client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -268,7 +268,7 @@ public void testWildcardQuerySupportsName() { createIndex("test1"); ensureGreen(); - client().prepareIndex("test1", "type1", "1").setSource("title", "title1").get(); + client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -290,7 +290,7 @@ public void testSpanFirstQuerySupportsName() { createIndex("test1"); ensureGreen(); - client().prepareIndex("test1", "type1", "1").setSource("title", "title1 title2").get(); + client().prepareIndex("test1").setId("1").setSource("title", "title1 title2").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -315,8 +315,8 @@ public void testMatchedWithShould() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("content", "Lorem ipsum dolor sit amet").get(); - client().prepareIndex("test", "type1", "2").setSource("content", "consectetur adipisicing elit").get(); + client().prepareIndex("test").setId("1").setSource("content", "Lorem ipsum dolor sit amet").get(); + client().prepareIndex("test").setId("2").setSource("content", "consectetur adipisicing elit").get(); refresh(); // Execute search at least two times to load it in cache @@ -349,7 +349,7 @@ public void testMatchedWithWrapperQuery() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("content", "Lorem ipsum dolor sit amet").get(); + client().prepareIndex("test").setId("1").setSource("content", "Lorem ipsum dolor sit amet").get(); refresh(); MatchQueryBuilder matchQueryBuilder = matchQuery("content", "amet").queryName("abc"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java index 59dc710f9c1ba..7df5b9b88a69c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java @@ -63,9 +63,8 @@ protected Collection> nodePlugins() { protected void setup() throws Exception { indexRandom( true, - client().prepareIndex("test", "test", "1") - .setSource("name", "arbitrary content", "other_name", "foo", "other_other_name", "bar"), - client().prepareIndex("test", "test", "2").setSource("other_name", "foo", "other_other_name", "bar") + client().prepareIndex("test").setId("1").setSource("name", "arbitrary content", "other_name", "foo", "other_other_name", "bar"), + 
client().prepareIndex("test").setId("2").setSource("other_name", "foo", "other_other_name", "bar") ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 85d8f26036177..de2926cadc032 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -152,11 +152,11 @@ public void testHighlightingWithKeywordIgnoreBoundaryScanner() throws IOExceptio .endObject(); mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("1") .setSource(jsonBuilder().startObject().array("tags", "foo bar", "foo bar", "foo bar", "foo baz").field("sort", 1).endObject()) .get(); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("2") .setSource(jsonBuilder().startObject().array("tags", "foo baz", "foo baz", "foo baz", "foo bar").field("sort", 2).endObject()) .get(); @@ -187,7 +187,7 @@ public void testHighlightingWithStoredKeyword() throws IOException { .endObject(); mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().field("text", "foo").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "foo").endObject()).get(); refresh(); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("text", "foo")) @@ -212,7 +212,7 @@ public void testHighlightingWithWildcardName() throws IOException { .endObject(); mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().field("text", "text").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "text").endObject()).get(); refresh(); for (String type : ALL_TYPES) { SearchResponse search = client().prepareSearch() @@ -241,7 +241,7 @@ public void testFieldAlias() throws IOException { .endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1").setSource("text", "foo").get(); + client().prepareIndex("test").setId("1").setSource("text", "foo").get(); refresh(); for (String type : ALL_TYPES) { @@ -271,7 +271,7 @@ public void testFieldAliasWithSourceLookup() throws IOException { .endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1").setSource("text", "foo bar").get(); + client().prepareIndex("test").setId("1").setSource("text", "foo bar").get(); refresh(); for (String type : ALL_TYPES) { @@ -298,7 +298,7 @@ public void testFieldAliasWithWildcardField() throws IOException { .endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1").setSource("keyword", "foo").get(); + client().prepareIndex("test").setId("1").setSource("keyword", "foo").get(); refresh(); HighlightBuilder builder = new HighlightBuilder().field(new Field("al*")).requireFieldMatch(false); @@ -330,7 +330,8 @@ public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws 
IOExc .endObject(); mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("unstored_text", "text").field("text", "text").endObject()) .get(); refresh(); @@ -358,7 +359,7 @@ public void testHighTermFrequencyDoc() throws IOException { for (int i = 0; i < 6000; i++) { builder.append("abc").append(" "); } - client().prepareIndex("test", "test", "1").setSource("name", builder.toString()).get(); + client().prepareIndex("test").setId("1").setSource("name", builder.toString()).get(); refresh(); SearchResponse search = client().prepareSearch() .setQuery(constantScoreQuery(matchQuery("name", "abc"))) @@ -378,7 +379,8 @@ public void testEnsureNoNegativeOffsets() throws Exception { ) ); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( "no_long_term", "This is a test where foo is highlighed and should be highlighted", @@ -437,7 +439,8 @@ public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( XContentFactory.jsonBuilder() .startObject() @@ -506,7 +509,8 @@ public void testSourceLookupHighlightingUsingFastVectorHighlighter() throws Exce IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( XContentFactory.jsonBuilder() .startObject() @@ -575,7 +579,8 @@ public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Except IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( XContentFactory.jsonBuilder() .startObject() @@ -654,12 +659,11 @@ public void testHighlightIssue1994() throws Exception { ); String[] titles = new String[] { "This is a test on the highlighting bug present in opensearch", "The bug is bugging us" }; - indexRandom(false, client().prepareIndex("test", "type1", "1").setSource("title", titles, "titleTV", titles)); + indexRandom(false, client().prepareIndex("test").setId("1").setSource("title", titles, "titleTV", titles)); indexRandom( true, - client().prepareIndex("test", "type1", "2") - .setSource("titleTV", new String[] { "some text to highlight", "highlight other text" }) + client().prepareIndex("test").setId("2").setSource("titleTV", new String[] { "some text to highlight", "highlight other text" }) ); SearchResponse search = client().prepareSearch() @@ -685,7 +689,7 @@ public void testGlobalHighlightingSettingsOverriddenAtFieldLevel() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource( "field1", new String[] { "this is a test", "this is the second test" }, @@ -734,7 +738,7 @@ public void testHighlightingOnWildcardFields() throws 
Exception { ); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource( "field-postings", "This is the first test sentence. Here is the second one.", @@ -793,7 +797,7 @@ public void testForceSourceWithSourceDisabled() throws Exception { ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "The quick brown fox jumps over the lazy dog", "field2", "second field content") .get(); refresh(); @@ -835,9 +839,7 @@ public void testForceSourceWithSourceDisabled() throws Exception { public void testPlainHighlighter() throws Exception { ensureGreen(); - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") - .get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -856,8 +858,7 @@ public void testFastVectorHighlighter() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") ); logger.info("--> highlighting and searching on field1"); @@ -891,7 +892,7 @@ public void testHighlighterWithSentenceBoundaryScanner() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1").setSource("field1", "A sentence with few words. Another sentence with even more words.") + client().prepareIndex("test").setSource("field1", "A sentence with few words. Another sentence with even more words.") ); for (String type : new String[] { "unified", "fvh" }) { @@ -932,7 +933,7 @@ public void testHighlighterWithSentenceBoundaryScannerAndLocale() throws Excepti indexRandom( true, - client().prepareIndex("test", "type1").setSource("field1", "A sentence with few words. Another sentence with even more words.") + client().prepareIndex("test").setSource("field1", "A sentence with few words. 
Another sentence with even more words.") ); for (String type : new String[] { "fvh", "unified" }) { @@ -973,10 +974,7 @@ public void testHighlighterWithWordBoundaryScanner() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); - indexRandom( - true, - client().prepareIndex("test", "type1").setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog") - ); + indexRandom(true, client().prepareIndex("test").setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog")); logger.info("--> highlighting and searching on 'field' with word boundary_scanner"); for (String type : new String[] { "unified", "fvh" }) { @@ -1006,10 +1004,7 @@ public void testHighlighterWithWordBoundaryScannerAndLocale() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); - indexRandom( - true, - client().prepareIndex("test", "type1").setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog") - ); + indexRandom(true, client().prepareIndex("test").setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog")); for (String type : new String[] { "unified", "fvh" }) { SearchSourceBuilder source = searchSource().query(termQuery("field1", "some")) @@ -1046,7 +1041,7 @@ public void testFVHManyMatches() throws Exception { // Index one megabyte of "t " over and over and over again String pattern = "t "; String value = new String(new char[1024 * 256 / pattern.length()]).replace("\0", pattern); - client().prepareIndex("test", "type1").setSource("field1", value).get(); + client().prepareIndex("test").setSource("field1", value).get(); refresh(); logger.info("--> highlighting and searching on field1 with default phrase limit"); @@ -1282,7 +1277,7 @@ public void testFastVectorHighlighterManyDocs() throws Exception { int COUNT = between(20, 100); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[COUNT]; for (int i = 0; i < COUNT; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field1", "test " + i); + indexRequestBuilders[i] = client().prepareIndex("test").setId(Integer.toString(i)).setSource("field1", "test " + i); } logger.info("--> indexing docs"); indexRandom(true, indexRequestBuilders); @@ -1323,7 +1318,8 @@ public void testSameContent() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a test on the highlighting bug present in opensearch"); } indexRandom(true, indexRequestBuilders); @@ -1350,7 +1346,8 @@ public void testFastVectorHighlighterOffsetParameter() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a test on the highlighting bug present in opensearch"); } indexRandom(true, indexRequestBuilders); @@ -1371,7 +1368,8 @@ public void testEscapeHtml() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - 
indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a html escaping highlighting test for *&? opensearch"); } indexRandom(true, indexRequestBuilders); @@ -1391,7 +1389,8 @@ public void testEscapeHtmlVector() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a html escaping highlighting test for *&? opensearch"); } indexRandom(true, indexRequestBuilders); @@ -1433,7 +1432,7 @@ public void testMultiMapperVectorWithStore() throws Exception { ) ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -1481,7 +1480,7 @@ public void testMultiMapperVectorFromSource() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -1529,7 +1528,7 @@ public void testMultiMapperNoVectorWithStore() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -1576,7 +1575,7 @@ public void testMultiMapperNoVectorFromSource() throws Exception { ) ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -1602,7 +1601,8 @@ public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exceptio IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a test for the enabling fast vector highlighter"); } indexRandom(true, indexRequestBuilders); @@ -1640,7 +1640,8 @@ public void testDisableFastVectorHighlighter() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a test for the workaround for the fast vector highlighting SOLR-3724"); } indexRandom(true, indexRequestBuilders); @@ -1695,7 +1696,8 @@ public void testDisableFastVectorHighlighter() throws Exception { public void testFSHHighlightAllMvFragments() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", "tags", 
"type=text,term_vector=with_positions_offsets")); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( "tags", new String[] { @@ -1724,9 +1726,7 @@ public void testFSHHighlightAllMvFragments() throws Exception { public void testBoostingQuery() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") - .get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -1742,9 +1742,7 @@ public void testBoostingQuery() { public void testBoostingQueryTermVector() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") - .get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -1761,9 +1759,7 @@ public void testCommonTermsQuery() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") - .get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -1778,9 +1774,7 @@ public void testCommonTermsTermVector() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") - .get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100)) @@ -1794,7 +1788,8 @@ public void testCommonTermsTermVector() throws IOException { public void testPlainHighlightDifferentFragmenter() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", "tags", "type=text")); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .array( @@ -1905,7 +1900,7 @@ public void testFastVectorHighlighterMultipleFields() { public void testMissingStoredField() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", "highlight_field", "type=text,store=true")); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", "highlight").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("field", "highlight").endObject()).get(); refresh(); // This query used to fail when the field to highlight was absent @@ -1943,7 +1938,8 @@ public void testNumericHighlighting() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "test", "1") + client().prepareIndex("test") + .setId("1") .setSource("text", "opensearch test", 
"byte", 25, "short", 42, "int", 100, "long", -1, "float", 3.2f, "double", 42.42) .get(); refresh(); @@ -1967,7 +1963,7 @@ public void testResetTwice() throws Exception { ).addMapping("type", "text", "type=text,analyzer=my_analyzer") ); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("text", "opensearch test").get(); + client().prepareIndex("test").setId("1").setSource("text", "opensearch test").get(); refresh(); SearchResponse response = client().prepareSearch("test") @@ -2286,7 +2282,7 @@ public void testPostingsHighlighter() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy quick dog") .get(); refresh(); @@ -2379,7 +2375,8 @@ public void testPostingsHighlighterNumberOfFragments() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( "field1", "The quick brown fox jumps over the lazy dog. The lazy red fox jumps over the quick dog. " @@ -2411,7 +2408,8 @@ public void testPostingsHighlighterNumberOfFragments() throws Exception { ); assertHighlight(searchResponse, 0, "field1", 1, 2, equalTo("The quick brown dog jumps over the lazy fox.")); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( "field1", new String[] { @@ -2477,7 +2475,7 @@ public void testMultiMatchQueryHighlight() throws IOException { .endObject(); assertAcked(prepareCreate("test").addMapping("type1", mapping)); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "The quick brown fox jumps over", "field2", "The quick brown fox jumps over") .get(); refresh(); @@ -2513,7 +2511,7 @@ public void testPostingsHighlighterOrderByScore() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource( "field1", new String[] { @@ -2563,7 +2561,8 @@ public void testPostingsHighlighterEscapeHtml() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a html escaping highlighting test for *&? opensearch"); } indexRandom(true, indexRequestBuilders); @@ -2612,7 +2611,7 @@ public void testPostingsHighlighterMultiMapperWithStore() throws Exception { ) ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("title", "this is a test . Second sentence.").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test . 
Second sentence.").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -2673,7 +2672,7 @@ public void testPostingsHighlighterMultiMapperFromSource() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -2714,7 +2713,8 @@ public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a test for the postings highlighter"); } indexRandom(true, indexRequestBuilders); @@ -2729,7 +2729,7 @@ public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { public void testPostingsHighlighterBoostingQuery() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); @@ -2747,7 +2747,7 @@ public void testPostingsHighlighterCommonTermsQuery() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); @@ -2790,7 +2790,7 @@ public void testPostingsHighlighterPrefixQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); @@ -2812,7 +2812,7 @@ public void testPostingsHighlighterFuzzyQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); @@ -2835,7 +2835,7 @@ public void testPostingsHighlighterRegexpQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); @@ -2858,7 +2858,7 @@ public void testPostingsHighlighterWildcardQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.") .get(); refresh(); @@ -2894,7 +2894,7 @@ public void testPostingsHighlighterTermRangeQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "aaab").get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "aaab").get(); refresh(); logger.info("--> highlighting and searching on field2"); @@ -2909,7 +2909,7 @@ public void testPostingsHighlighterQueryString() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); @@ -2932,7 +2932,7 @@ public void testPostingsHighlighterRegexpQueryWithinConstantScoreQuery() throws assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); + client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -2946,7 +2946,7 @@ public void testPostingsHighlighterMultiTermQueryMultipleLevels() throws Excepti assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); + client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -2963,7 +2963,7 @@ public void testPostingsHighlighterPrefixQueryWithinBooleanQuery() throws Except assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); + client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -2978,7 +2978,7 @@ public void testPostingsHighlighterQueryStringWithinFilteredQuery() throws Excep assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); + client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -3002,7 +3002,8 @@ public void testPostingsHighlighterManyDocs() throws Exception { // (https://github.com/elastic/elasticsearch/issues/4103) String prefix = randomAlphaOfLengthBetween(5, 30); prefixes.put(String.valueOf(i), prefix); - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("field1", "Sentence " + prefix + " test. 
Sentence two."); } logger.info("--> indexing docs"); @@ -3038,7 +3039,7 @@ public void testDoesNotHighlightTypeName() throws Exception { assertAcked(prepareCreate("test").addMapping("typename", mapping)); ensureGreen(); - indexRandom(true, client().prepareIndex("test", "typename").setSource("foo", "test typename")); + indexRandom(true, client().prepareIndex("test").setSource("foo", "test typename")); for (String highlighter : ALL_TYPES) { SearchResponse response = client().prepareSearch("test") @@ -3066,7 +3067,7 @@ public void testDoesNotHighlightAliasFilters() throws Exception { assertAcked(client().admin().indices().prepareAliases().addAlias("test", "filtered_alias", matchQuery("foo", "japanese"))); ensureGreen(); - indexRandom(true, client().prepareIndex("test", "typename").setSource("foo", "test japanese")); + indexRandom(true, client().prepareIndex("test").setSource("foo", "test japanese")); for (String highlighter : ALL_TYPES) { SearchResponse response = client().prepareSearch("filtered_alias") @@ -3189,7 +3190,8 @@ public void testGeoFieldHighlightingWithDifferentHighlighters() throws IOExcepti mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("text", "Arbitrary text field which will should not cause a failure").endObject()) .get(); refresh(); @@ -3229,7 +3231,8 @@ public void testGeoFieldHighlightingWhenQueryGetsRewritten() throws IOException assertAcked(prepareCreate("test").addMapping("jobs", mappings)); ensureYellow(); - client().prepareIndex("test", "jobs", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("jd", "some आवश्यकता है- आर्य समाज अनाथालय, 68 सिविल लाइन्स, बरेली को एक पुरूष" + " रस text") @@ -3267,7 +3270,8 @@ public void testKeywordFieldHighlighting() throws IOException { mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("keyword_field", "some text").endObject()) .get(); refresh(); @@ -3310,7 +3314,8 @@ public void testACopyFieldWithNestedQuery() throws Exception { ); prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startArray("foo") @@ -3338,7 +3343,8 @@ public void testACopyFieldWithNestedQuery() throws Exception { } public void testFunctionScoreQueryHighlight() throws Exception { - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("text", "brown").endObject()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); @@ -3354,7 +3360,8 @@ public void testFunctionScoreQueryHighlight() throws Exception { } public void testFiltersFunctionScoreQueryHighlight() throws Exception { - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("text", "brown").field("enable", "yes").endObject()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); @@ -3391,9 +3398,9 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time"); indexRandom( true, - 
client().prepareIndex("index-1", "type", "1").setSource("d", formatter.format(now), "field", "hello world"), - client().prepareIndex("index-1", "type", "2").setSource("d", formatter.format(now.minusDays(1)), "field", "hello"), - client().prepareIndex("index-1", "type", "3").setSource("d", formatter.format(now.minusDays(2)), "field", "world") + client().prepareIndex("index-1").setId("1").setSource("d", formatter.format(now), "field", "hello world"), + client().prepareIndex("index-1").setId("2").setSource("d", formatter.format(now.minusDays(1)), "field", "hello"), + client().prepareIndex("index-1").setId("3").setSource("d", formatter.format(now.minusDays(2)), "field", "world") ); ensureSearchable("index-1"); for (int i = 0; i < 5; i++) { @@ -3438,7 +3445,8 @@ public void testWithNestedQuery() throws Exception { ); prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startArray("foo") @@ -3508,7 +3516,8 @@ public void testWithNormalizer() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "doc", "0") + client().prepareIndex("test") + .setId("0") .setSource("keyword", "Hello World") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); @@ -3529,7 +3538,8 @@ public void testDisableHighlightIdField() throws Exception { assertAcked(prepareCreate("test").addMapping("doc", "keyword", "type=keyword")); ensureGreen(); - client().prepareIndex("test", "doc", "d33f85bf1e51e84d9ab38948db9f3a068e1fe5294f1d8603914ac8c7bcc39ca1") + client().prepareIndex("test") + .setId("d33f85bf1e51e84d9ab38948db9f3a068e1fe5294f1d8603914ac8c7bcc39ca1") .setSource("keyword", "Hello World") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java index d9013a61e2e08..8767904e03c72 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -226,10 +226,10 @@ public void testWithIndexFilter() throws InterruptedException { assertAcked(prepareCreate("index-2").addMapping("_doc", "timestamp", "type=date", "field1", "type=long")); List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("index-1", "_doc").setSource("timestamp", "2015-07-08")); - reqs.add(client().prepareIndex("index-1", "_doc").setSource("timestamp", "2018-07-08")); - reqs.add(client().prepareIndex("index-2", "_doc").setSource("timestamp", "2019-10-12")); - reqs.add(client().prepareIndex("index-2", "_doc").setSource("timestamp", "2020-07-08")); + reqs.add(client().prepareIndex("index-1").setSource("timestamp", "2015-07-08")); + reqs.add(client().prepareIndex("index-1").setSource("timestamp", "2018-07-08")); + reqs.add(client().prepareIndex("index-2").setSource("timestamp", "2019-10-12")); + reqs.add(client().prepareIndex("index-2").setSource("timestamp", "2020-07-08")); indexRandom(true, reqs); FieldCapabilitiesResponse response = client().prepareFieldCaps("index-*").setFields("*").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java index e4c6e3b4d88ac..d5cd358612a60 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java @@ -48,6 +48,7 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.index.fielddata.ScriptDocValues; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; import org.opensearch.rest.RestStatus; @@ -187,7 +188,7 @@ public void testStoredFields() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("field1") .field("type", "text") @@ -206,9 +207,10 @@ public void testStoredFields() throws Exception { .endObject() ); - client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject().field("field1", "value1").field("field2", "value2").field("field3", "value3").endObject() ) @@ -289,7 +291,7 @@ public void testScriptDocAndFields() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("num1") .field("type", "double") @@ -300,21 +302,24 @@ public void testScriptDocAndFields() throws Exception { .endObject() ); - client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).field("date", "1970-01-01T00:00:00").endObject() ) .get(); client().admin().indices().prepareFlush().get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).field("date", "1970-01-01T00:00:25").endObject() ) .get(); client().admin().indices().prepareFlush().get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).field("date", "1970-01-01T00:02:00").endObject() ) @@ -388,7 +393,7 @@ public void testScriptFieldWithNanos() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("doc") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("date") .field("type", "date_nanos") @@ -398,14 +403,15 @@ public void testScriptFieldWithNanos() throws Exception { .endObject() ); - client().admin().indices().preparePutMapping().setType("doc").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); String date = "2019-01-31T10:00:00.123456789Z"; indexRandom( true, false, - client().prepareIndex("test", "doc", "1") + client().prepareIndex("test") + .setId("1") 
.setSource(jsonBuilder().startObject().field("date", "1970-01-01T00:00:00.000Z").endObject()), - client().prepareIndex("test", "doc", "2").setSource(jsonBuilder().startObject().field("date", date).endObject()) + client().prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("date", date).endObject()) ); SearchResponse response = client().prepareSearch() @@ -443,7 +449,8 @@ public void testIdBasedScriptFields() throws Exception { int numDocs = randomIntBetween(1, 30); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("num1", i).endObject()); } indexRandom(true, indexRequestBuilders); @@ -505,7 +512,8 @@ public void testIdBasedScriptFields() throws Exception { public void testScriptFieldUsingSource() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startObject("obj1") @@ -566,7 +574,7 @@ public void testScriptFieldUsingSource() throws Exception { } public void testScriptFieldsForNullReturn() throws Exception { - client().prepareIndex("test", "type1", "1").setSource("foo", "bar").setRefreshPolicy("true").get(); + client().prepareIndex("test").setId("1").setSource("foo", "bar").setRefreshPolicy("true").get(); SearchResponse response = client().prepareSearch() .setQuery(matchAllQuery()) @@ -585,7 +593,8 @@ public void testScriptFieldsForNullReturn() throws Exception { public void testPartialFields() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( XContentFactory.jsonBuilder() .startObject() @@ -617,7 +626,7 @@ public void testStoredFieldsWithoutSource() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("_source") .field("enabled", false) .endObject() @@ -663,10 +672,11 @@ public void testStoredFieldsWithoutSource() throws Exception { .endObject() ); - client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("byte_field", (byte) 1) @@ -731,7 +741,8 @@ public void testStoredFieldsWithoutSource() throws Exception { } public void testSearchFieldsMetadata() throws Exception { - client().prepareIndex("my-index", "my-type1", "1") + client().prepareIndex("my-index") + .setId("1") .setRouting("1") .setSource(jsonBuilder().startObject().field("field1", "value").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -745,7 +756,8 @@ public void testSearchFieldsMetadata() throws Exception { } public void testSearchFieldsNonLeafField() throws Exception { - client().prepareIndex("my-index", "my-type1", "1") + client().prepareIndex("my-index") + .setId("1") .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ 
-763,9 +775,9 @@ public void testGetFieldsComplexField() throws Exception { .prepareCreate("my-index") .setSettings(Settings.builder().put("index.refresh_interval", -1)) .addMapping( - "doc", + MapperService.SINGLE_MAPPING_NAME, jsonBuilder().startObject() - .startObject("doc") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("field1") .field("type", "object") @@ -817,7 +829,7 @@ public void testGetFieldsComplexField() throws Exception { .endObject() ); - client().prepareIndex("my-index", "doc", "1").setRefreshPolicy(IMMEDIATE).setSource(source, XContentType.JSON).get(); + client().prepareIndex("my-index").setId("1").setRefreshPolicy(IMMEDIATE).setSource(source, XContentType.JSON).get(); String field = "field1.field2.field3.field4"; @@ -831,7 +843,7 @@ public void testGetFieldsComplexField() throws Exception { // see #8203 public void testSingleValueFieldDatatField() throws ExecutionException, InterruptedException { assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", "test_field", "type=keyword").get()); - indexRandom(true, client().prepareIndex("test", "type", "1").setSource("test_field", "foobar")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("test_field", "foobar")); refresh(); SearchResponse searchResponse = client().prepareSearch("test") .setSource(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).docValueField("test_field")) @@ -847,7 +859,7 @@ public void testDocValueFields() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("_source") .field("enabled", false) .endObject() @@ -895,10 +907,11 @@ public void testDocValueFields() throws Exception { .endObject() ); - client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("text_field", "foo") @@ -1124,7 +1137,8 @@ public void testScriptFields() throws Exception { List reqs = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { reqs.add( - client().prepareIndex("index", "type", Integer.toString(i)) + client().prepareIndex("index") + .setId(Integer.toString(i)) .setSource( "s", Integer.toString(i), @@ -1167,7 +1181,7 @@ public void testScriptFields() throws Exception { public void testDocValueFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("_source") .field("enabled", false) .endObject() @@ -1191,13 +1205,13 @@ public void testDocValueFieldsWithFieldAlias() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mapping)); + assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); ensureGreen("test"); DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC); org.joda.time.format.DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd"); - index("test", "type", "1", "text_field", "foo", "date_field", formatter.print(date)); + index("test", MapperService.SINGLE_MAPPING_NAME, "1", 
"text_field", "foo", "date_field", formatter.print(date)); refresh("test"); SearchRequestBuilder builder = client().prepareSearch() @@ -1230,7 +1244,7 @@ public void testDocValueFieldsWithFieldAlias() throws Exception { public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("_source") .field("enabled", false) .endObject() @@ -1254,13 +1268,13 @@ public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mapping)); + assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); ensureGreen("test"); DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC); org.joda.time.format.DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd"); - index("test", "type", "1", "text_field", "foo", "date_field", formatter.print(date)); + index("test", MapperService.SINGLE_MAPPING_NAME, "1", "text_field", "foo", "date_field", formatter.print(date)); refresh("test"); SearchRequestBuilder builder = client().prepareSearch() @@ -1292,7 +1306,7 @@ public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { public void testStoredFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("field1") .field("type", "text") @@ -1313,9 +1327,9 @@ public void testStoredFieldsWithFieldAlias() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mapping)); + assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); - index("test", "type", "1", "field1", "value1", "field2", "value2"); + index("test", MapperService.SINGLE_MAPPING_NAME, "1", "field1", "value1", "field2", "value2"); refresh("test"); SearchResponse searchResponse = client().prepareSearch() @@ -1336,7 +1350,7 @@ public void testStoredFieldsWithFieldAlias() throws Exception { public void testWildcardStoredFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("field1") .field("type", "text") @@ -1357,9 +1371,9 @@ public void testWildcardStoredFieldsWithFieldAlias() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mapping)); + assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); - index("test", "type", "1", "field1", "value1", "field2", "value2"); + index("test", MapperService.SINGLE_MAPPING_NAME, "1", "field1", "value1", "field2", "value2"); refresh("test"); SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field*").get(); @@ -1382,7 +1396,8 @@ public void testLoadMetadata() throws Exception { indexRandom( true, - client().prepareIndex("test", "doc", "1") + client().prepareIndex("test") + .setId("1") .setRouting("1") .setSource(jsonBuilder().startObject().field("field1", "value").endObject()) ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java index 0f47877facaff..712026eaf5c43 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java @@ -51,6 +51,7 @@ import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder; import org.opensearch.index.query.functionscore.ScoreFunctionBuilders; import org.opensearch.search.MultiValueMode; +import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.VersionUtils; @@ -77,7 +78,9 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertOrderedSearchHits; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; @@ -113,7 +116,6 @@ public void testDistanceScoreGeoLinGaussExp() throws Exception { List indexBuilders = new ArrayList<>(); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource( @@ -128,7 +130,6 @@ public void testDistanceScoreGeoLinGaussExp() throws Exception { ); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("2") .setIndex("test") .setSource( @@ -146,7 +147,6 @@ public void testDistanceScoreGeoLinGaussExp() throws Exception { for (int i = 1; i <= numDummyDocs; i++) { indexBuilders.add( client().prepareIndex() - .setType("type1") .setId(Integer.toString(i + 3)) .setIndex("test") .setSource( @@ -244,14 +244,12 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { List indexBuilders = new ArrayList<>(); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource(jsonBuilder().startObject().field("test", "value").field("num", 0.5).endObject()) ); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("2") .setIndex("test") .setSource(jsonBuilder().startObject().field("test", "value").field("num", 1.7).endObject()) @@ -262,7 +260,6 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { for (int i = 0; i < numDummyDocs; i++) { indexBuilders.add( client().prepareIndex() - .setType("type1") .setId(Integer.toString(i + 3)) .setIndex("test") .setSource(jsonBuilder().startObject().field("test", "value").field("num", 3.0 + i).endObject()) @@ -360,7 +357,6 @@ public void testBoostModeSettingWorks() throws Exception { List indexBuilders = new ArrayList<>(); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource( @@ -375,7 +371,6 @@ public void testBoostModeSettingWorks() throws Exception { ); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("2") .setIndex("test") .setSource( @@ -459,7 +454,6 @@ public void testParseGeoPoint() throws Exception { ); client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource( @@ -528,7 +522,6 @@ public void testCombineModes() throws Exception { ); client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setRefreshPolicy(IMMEDIATE) @@ 
-626,6 +619,76 @@ public void testCombineModes() throws Exception { } + public void testCombineModesExplain() throws Exception { + assertAcked( + prepareCreate("test").addMapping( + "type1", + jsonBuilder().startObject() + .startObject("type1") + .startObject("properties") + .startObject("test") + .field("type", "text") + .endObject() + .startObject("num") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + + client().prepareIndex() + .setId("1") + .setIndex("test") + .setRefreshPolicy(IMMEDIATE) + .setSource(jsonBuilder().startObject().field("test", "value value").field("num", 1.0).endObject()) + .get(); + + FunctionScoreQueryBuilder baseQuery = functionScoreQuery( + constantScoreQuery(termQuery("test", "value")).queryName("query1"), + ScoreFunctionBuilders.weightFactorFunction(2, "weight1") + ); + // decay score should return 0.5 for this function and baseQuery should return 2.0f as it's score + ActionFuture response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().explain(true) + .query( + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5, "func2")).boostMode( + CombineFunction.MULTIPLY + ) + ) + ) + ); + SearchResponse sr = response.actionGet(); + SearchHits sh = sr.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(0).getExplanation().getDetails(), arrayWithSize(2)); + assertThat(sh.getAt(0).getExplanation().getDetails()[0].getDetails(), arrayWithSize(2)); + // "description": "ConstantScore(test:value) (_name: query1)" + assertThat( + sh.getAt(0).getExplanation().getDetails()[0].getDetails()[0].getDescription(), + equalTo("ConstantScore(test:value) (_name: query1)") + ); + assertThat(sh.getAt(0).getExplanation().getDetails()[0].getDetails()[1].getDetails(), arrayWithSize(2)); + assertThat(sh.getAt(0).getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getDetails(), arrayWithSize(2)); + // "description": "constant score 1.0(_name: func1) - no function provided" + assertThat( + sh.getAt(0).getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getDetails()[0].getDescription(), + equalTo("constant score 1.0(_name: weight1) - no function provided") + ); + // "description": "exp(-0.5*pow(MIN[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)],2.0)/0.7213475204444817, + // _name: func2)" + assertThat(sh.getAt(0).getExplanation().getDetails()[1].getDetails(), arrayWithSize(2)); + assertThat(sh.getAt(0).getExplanation().getDetails()[1].getDetails()[0].getDetails(), arrayWithSize(1)); + assertThat( + sh.getAt(0).getExplanation().getDetails()[1].getDetails()[0].getDetails()[0].getDescription(), + containsString("_name: func2") + ); + } + public void testExceptionThrownIfScaleLE0() throws Exception { assertAcked( prepareCreate("test").addMapping( @@ -900,7 +963,6 @@ public void testManyDocsLin() throws Exception { indexBuilders.add( client().prepareIndex() - .setType("type") .setId(Integer.toString(i)) .setIndex("test") .setSource( @@ -1098,7 +1160,6 @@ public void testMultiFieldOptions() throws Exception { // Index for testing MIN and MAX IndexRequestBuilder doc1 = client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource( @@ -1117,7 +1178,6 @@ public void testMultiFieldOptions() throws Exception { .endObject() ); IndexRequestBuilder doc2 = client().prepareIndex() - .setType("type1") .setId("2") 
.setIndex("test") .setSource( @@ -1171,14 +1231,12 @@ public void testMultiFieldOptions() throws Exception { // Now test AVG and SUM doc1 = client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource( jsonBuilder().startObject().field("test", "value").startArray("num").value(0.0).value(1.0).value(2.0).endArray().endObject() ); doc2 = client().prepareIndex() - .setType("type1") .setId("2") .setIndex("test") .setSource(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject()); @@ -1210,4 +1268,132 @@ public void testMultiFieldOptions() throws Exception { sh = sr.getHits(); assertThat((double) (sh.getAt(0).getScore()), closeTo((sh.getAt(1).getScore()), 1.e-6d)); } + + public void testDistanceScoreGeoLinGaussExplain() throws Exception { + assertAcked( + prepareCreate("test").addMapping( + "type1", + jsonBuilder().startObject() + .startObject("type1") + .startObject("properties") + .startObject("test") + .field("type", "text") + .endObject() + .startObject("loc") + .field("type", "geo_point") + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + + List indexBuilders = new ArrayList<>(); + indexBuilders.add( + client().prepareIndex() + .setId("1") + .setIndex("test") + .setSource( + jsonBuilder().startObject() + .field("test", "value") + .startObject("loc") + .field("lat", 10) + .field("lon", 20) + .endObject() + .endObject() + ) + ); + indexBuilders.add( + client().prepareIndex() + .setId("2") + .setIndex("test") + .setSource( + jsonBuilder().startObject() + .field("test", "value") + .startObject("loc") + .field("lat", 11) + .field("lon", 22) + .endObject() + .endObject() + ) + ); + + indexRandom(true, indexBuilders); + + // Test Gauss + List lonlat = new ArrayList<>(); + lonlat.add(20f); + lonlat.add(11f); + + final String queryName = "query1"; + final String functionName = "func1"; + ActionFuture response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().explain(true) + .query( + functionScoreQuery(baseQuery.queryName(queryName), gaussDecayFunction("loc", lonlat, "1000km", functionName)) + ) + ) + ); + SearchResponse sr = response.actionGet(); + SearchHits sh = sr.getHits(); + assertThat(sh.getTotalHits().value, equalTo(2L)); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(1).getId(), equalTo("2")); + assertExplain(queryName, functionName, sr); + + response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().explain(true) + .query( + functionScoreQuery(baseQuery.queryName(queryName), linearDecayFunction("loc", lonlat, "1000km", functionName)) + ) + ) + ); + + sr = response.actionGet(); + sh = sr.getHits(); + assertThat(sh.getTotalHits().value, equalTo(2L)); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(1).getId(), equalTo("2")); + assertExplain(queryName, functionName, sr); + + response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().explain(true) + .query( + functionScoreQuery( + baseQuery.queryName(queryName), + exponentialDecayFunction("loc", lonlat, "1000km", functionName) + ) + ) + ) + ); + + sr = response.actionGet(); + sh = sr.getHits(); + assertThat(sh.getTotalHits().value, equalTo(2L)); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(1).getId(), equalTo("2")); + assertExplain(queryName, functionName, sr); + } + + private void assertExplain(final String queryName, final String 
functionName, SearchResponse sr) { + SearchHit firstHit = sr.getHits().getAt(0); + assertThat(firstHit.getExplanation().getDetails(), arrayWithSize(2)); + // "description": "*:* (_name: query1)" + assertThat(firstHit.getExplanation().getDetails()[0].getDescription().toString(), containsString("_name: " + queryName)); + assertThat(firstHit.getExplanation().getDetails()[1].getDetails(), arrayWithSize(2)); + // "description": "random score function (seed: 12345678, field: _seq_no, _name: func1)" + assertThat(firstHit.getExplanation().getDetails()[1].getDetails()[0].getDetails(), arrayWithSize(1)); + // "description": "exp(-0.5*pow(MIN of: [Math.max(arcDistance(10.999999972991645, 21.99999994598329(=doc value),11.0, 20.0(=origin)) + // - 0.0(=offset), 0)],2.0)/7.213475204444817E11, _name: func1)" + assertThat( + firstHit.getExplanation().getDetails()[1].getDetails()[0].getDetails()[0].getDescription().toString(), + containsString("_name: " + functionName) + ); + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java index f577636d18d4b..f67b913a75871 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java @@ -38,6 +38,7 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchType; import org.opensearch.common.lucene.search.function.CombineFunction; +import org.opensearch.common.lucene.search.function.Functions; import org.opensearch.common.settings.Settings; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.plugins.Plugin; @@ -72,6 +73,7 @@ import static org.opensearch.index.query.QueryBuilders.termQuery; import static org.opensearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; import static org.opensearch.search.builder.SearchSourceBuilder.searchSource; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -121,8 +123,17 @@ static class MyScript extends ScoreScript implements ExplainableScoreScript { @Override public Explanation explain(Explanation subQueryScore) throws IOException { + return explain(subQueryScore, null); + } + + @Override + public Explanation explain(Explanation subQueryScore, String functionName) throws IOException { Explanation scoreExp = Explanation.match(subQueryScore.getValue(), "_score: ", subQueryScore); - return Explanation.match((float) (execute(null)), "This script returned " + execute(null), scoreExp); + return Explanation.match( + (float) (execute(null)), + "This script" + Functions.nameOrEmptyFunc(functionName) + " returned " + execute(null), + scoreExp + ); } @Override @@ -140,7 +151,7 @@ public void testExplainScript() throws InterruptedException, IOException, Execut List indexRequests = new ArrayList<>(); for (int i = 0; i < 20; i++) { indexRequests.add( - client().prepareIndex("test", "type") + client().prepareIndex("test") .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("number_field", i).field("text", "text").endObject()) ); @@ -174,4 +185,36 @@ public void testExplainScript() throws InterruptedException, IOException, Execut idCounter--; } } + + public void testExplainScriptWithName() throws InterruptedException, IOException, 
ExecutionException { + List indexRequests = new ArrayList<>(); + indexRequests.add( + client().prepareIndex("test") + .setId(Integer.toString(1)) + .setSource(jsonBuilder().startObject().field("number_field", 1).field("text", "text").endObject()) + ); + indexRandom(true, true, indexRequests); + client().admin().indices().prepareRefresh().get(); + ensureYellow(); + SearchResponse response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().explain(true) + .query( + functionScoreQuery( + termQuery("text", "text"), + scriptFunction(new Script(ScriptType.INLINE, "test", "explainable_script", Collections.emptyMap()), "func1") + ).boostMode(CombineFunction.REPLACE) + ) + ) + ).actionGet(); + + OpenSearchAssertions.assertNoFailures(response); + SearchHits hits = response.getHits(); + assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getHits()[0].getId(), equalTo("1")); + assertThat(hits.getHits()[0].getExplanation().getDetails(), arrayWithSize(2)); + assertThat(hits.getHits()[0].getExplanation().getDetails()[0].getDescription(), containsString("_name: func1")); + } + } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java index 573bd7f75c266..8e0a14b7062a7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java @@ -35,10 +35,13 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.lucene.search.function.FieldValueFactorFunction; +import org.opensearch.search.SearchHit; import org.opensearch.test.OpenSearchIntegTestCase; import java.io.IOException; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.arrayWithSize; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.opensearch.index.query.QueryBuilders.functionScoreQuery; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; @@ -71,9 +74,9 @@ public void testFieldValueFactor() throws IOException { ).get() ); - client().prepareIndex("test", "type1", "1").setSource("test", 5, "body", "foo").get(); - client().prepareIndex("test", "type1", "2").setSource("test", 17, "body", "foo").get(); - client().prepareIndex("test", "type1", "3").setSource("body", "bar").get(); + client().prepareIndex("test").setId("1").setSource("test", 5, "body", "foo").get(); + client().prepareIndex("test").setId("2").setSource("test", 17, "body", "foo").get(); + client().prepareIndex("test").setId("3").setSource("body", "bar").get(); refresh(); @@ -143,7 +146,7 @@ public void testFieldValueFactor() throws IOException { .get(); assertEquals(response.getHits().getAt(0).getScore(), response.getHits().getAt(2).getScore(), 0); - client().prepareIndex("test", "type1", "2").setSource("test", -1, "body", "foo").get(); + client().prepareIndex("test").setId("2").setSource("test", -1, "body", "foo").get(); refresh(); // -1 divided by 0 is infinity, which should provoke an exception. 
@@ -163,4 +166,47 @@ public void testFieldValueFactor() throws IOException { // locally, instead of just having failures } } + + public void testFieldValueFactorExplain() throws IOException { + assertAcked( + prepareCreate("test").addMapping( + "type1", + jsonBuilder().startObject() + .startObject("type1") + .startObject("properties") + .startObject("test") + .field("type", randomFrom(new String[] { "short", "float", "long", "integer", "double" })) + .endObject() + .startObject("body") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject() + ).get() + ); + + client().prepareIndex("test").setId("1").setSource("test", 5, "body", "foo").get(); + client().prepareIndex("test").setId("2").setSource("test", 17, "body", "foo").get(); + client().prepareIndex("test").setId("3").setSource("body", "bar").get(); + + refresh(); + + // document 2 scores higher because 17 > 5 + final String functionName = "func1"; + final String queryName = "query"; + SearchResponse response = client().prepareSearch("test") + .setExplain(true) + .setQuery( + functionScoreQuery(simpleQueryStringQuery("foo").queryName(queryName), fieldValueFactorFunction("test", functionName)) + ) + .get(); + assertOrderedSearchHits(response, "2", "1"); + SearchHit firstHit = response.getHits().getAt(0); + assertThat(firstHit.getExplanation().getDetails(), arrayWithSize(2)); + // "description": "sum of: (_name: query)" + assertThat(firstHit.getExplanation().getDetails()[0].getDescription(), containsString("_name: " + queryName)); + // "description": "field value function(_name: func1): none(doc['test'].value * factor=1.0)" + assertThat(firstHit.getExplanation().getDetails()[1].toString(), containsString("_name: " + functionName)); + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java index 8bb844d5edc68..3d24933f66d17 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java @@ -43,6 +43,7 @@ import org.opensearch.script.MockScriptPlugin; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; +import org.opensearch.search.SearchHit; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.OpenSearchTestCase; @@ -66,6 +67,8 @@ import static org.opensearch.search.builder.SearchSourceBuilder.searchSource; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -140,6 +143,35 @@ public void testScriptScoresWithAgg() throws IOException { assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getDocCount(), is(1L)); } + public void testScriptScoresWithAggWithExplain() throws IOException { + createIndex(INDEX); + index(INDEX, TYPE, "1", jsonBuilder().startObject().field("dummy_field", 1).endObject()); + refresh(); + + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get score value", Collections.emptyMap()); + + SearchResponse response = client().search( + 
searchRequest().source( + searchSource().explain(true) + .query(functionScoreQuery(scriptFunction(script, "func1"), "query1")) + .aggregation(terms("score_agg").script(script)) + ) + ).actionGet(); + assertSearchResponse(response); + + final SearchHit firstHit = response.getHits().getAt(0); + assertThat(firstHit.getScore(), equalTo(1.0f)); + assertThat(firstHit.getExplanation().getDetails(), arrayWithSize(2)); + // "description": "*:* (_name: query1)" + assertThat(firstHit.getExplanation().getDetails()[0].getDescription(), containsString("_name: query1")); + assertThat(firstHit.getExplanation().getDetails()[1].getDetails(), arrayWithSize(2)); + // "description": "script score function(_name: func1), computed with script:\"Script{ ... }\"" + assertThat(firstHit.getExplanation().getDetails()[1].getDetails()[0].getDescription(), containsString("_name: func1")); + + assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getKeyAsString(), equalTo("1.0")); + assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getDocCount(), is(1L)); + } + public void testMinScoreFunctionScoreBasic() throws IOException { float score = randomValueOtherThanMany((f) -> Float.compare(f, 0) < 0, OpenSearchTestCase::randomFloat); float minScore = randomValueOtherThanMany((f) -> Float.compare(f, 0) < 0, OpenSearchTestCase::randomFloat); @@ -189,7 +221,7 @@ public void testMinScoreFunctionScoreManyDocsAndRandomMinScore() throws IOExcept int scoreOffset = randomIntBetween(0, 2 * numDocs); int minScore = randomIntBetween(0, 2 * numDocs); for (int i = 0; i < numDocs; i++) { - docs.add(client().prepareIndex(INDEX, TYPE, Integer.toString(i)).setSource("num", i + scoreOffset)); + docs.add(client().prepareIndex(INDEX).setId(Integer.toString(i)).setSource("num", i + scoreOffset)); } indexRandom(true, docs); Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return (doc['num'].value)", Collections.emptyMap()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java index af7633628dab1..885f1aa7ff7a0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java @@ -171,7 +171,7 @@ public double evaluate(double value, double scale) { } @Override - public Explanation explainFunction(String distanceString, double distanceVal, double scale) { + public Explanation explainFunction(String distanceString, double distanceVal, double scale, String functionName) { return Explanation.match((float) distanceVal, "" + distanceVal); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java index 134c68538d15b..a21363e58949b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java @@ -92,7 +92,7 @@ public void testEnforceWindowSize() { // this int iters = scaledRandomIntBetween(10, 20); for (int i = 0; i < iters; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("f", Integer.toString(i)).get(); + 
client().prepareIndex("test").setId(Integer.toString(i)).setSource("f", Integer.toString(i)).get(); } refresh(); @@ -142,9 +142,10 @@ public void testRescorePhrase() throws Exception { ).setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1)) ); - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree ").get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test").setId("1").setSource("field1", "the quick brown fox").get(); + client().prepareIndex("test").setId("2").setSource("field1", "the quick lazy huge brown fox jumps over the tree ").get(); + client().prepareIndex("test") + .setId("3") .setSource("field1", "quick huge brown", "field2", "the quick lazy huge brown fox jumps over the tree") .get(); refresh(); @@ -207,21 +208,21 @@ public void testMoreDocs() throws Exception { .setSettings(builder.put("index.number_of_shards", 1)) ); - client().prepareIndex("test", "type1", "1").setSource("field1", "massachusetts avenue boston massachusetts").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "lexington avenue boston massachusetts").get(); - client().prepareIndex("test", "type1", "3").setSource("field1", "boston avenue lexington massachusetts").get(); + client().prepareIndex("test").setId("1").setSource("field1", "massachusetts avenue boston massachusetts").get(); + client().prepareIndex("test").setId("2").setSource("field1", "lexington avenue boston massachusetts").get(); + client().prepareIndex("test").setId("3").setSource("field1", "boston avenue lexington massachusetts").get(); client().admin().indices().prepareRefresh("test").get(); - client().prepareIndex("test", "type1", "4").setSource("field1", "boston road lexington massachusetts").get(); - client().prepareIndex("test", "type1", "5").setSource("field1", "lexington street lexington massachusetts").get(); - client().prepareIndex("test", "type1", "6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); - client().prepareIndex("test", "type1", "7").setSource("field1", "bosten street san franciso california").get(); + client().prepareIndex("test").setId("4").setSource("field1", "boston road lexington massachusetts").get(); + client().prepareIndex("test").setId("5").setSource("field1", "lexington street lexington massachusetts").get(); + client().prepareIndex("test").setId("6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); + client().prepareIndex("test").setId("7").setSource("field1", "bosten street san franciso california").get(); client().admin().indices().prepareRefresh("test").get(); - client().prepareIndex("test", "type1", "8").setSource("field1", "hollywood boulevard los angeles california").get(); - client().prepareIndex("test", "type1", "9").setSource("field1", "1st street boston massachussetts").get(); - client().prepareIndex("test", "type1", "10").setSource("field1", "1st street boston massachusetts").get(); + client().prepareIndex("test").setId("8").setSource("field1", "hollywood boulevard los angeles california").get(); + client().prepareIndex("test").setId("9").setSource("field1", "1st street boston massachussetts").get(); + client().prepareIndex("test").setId("10").setSource("field1", "1st street boston massachusetts").get(); client().admin().indices().prepareRefresh("test").get(); - client().prepareIndex("test", "type1", 
"11").setSource("field1", "2st street boston massachusetts").get(); - client().prepareIndex("test", "type1", "12").setSource("field1", "3st street boston massachusetts").get(); + client().prepareIndex("test").setId("11").setSource("field1", "2st street boston massachusetts").get(); + client().prepareIndex("test").setId("12").setSource("field1", "3st street boston massachusetts").get(); client().admin().indices().prepareRefresh("test").get(); SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)) @@ -302,11 +303,11 @@ public void testSmallRescoreWindow() throws Exception { .setSettings(builder.put("index.number_of_shards", 1)) ); - client().prepareIndex("test", "type1", "3").setSource("field1", "massachusetts").get(); - client().prepareIndex("test", "type1", "6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); + client().prepareIndex("test").setId("3").setSource("field1", "massachusetts").get(); + client().prepareIndex("test").setId("6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); client().admin().indices().prepareRefresh("test").get(); - client().prepareIndex("test", "type1", "1").setSource("field1", "lexington massachusetts avenue").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "lexington avenue boston massachusetts road").get(); + client().prepareIndex("test").setId("1").setSource("field1", "lexington massachusetts avenue").get(); + client().prepareIndex("test").setId("2").setSource("field1", "lexington avenue boston massachusetts road").get(); client().admin().indices().prepareRefresh("test").get(); SearchResponse searchResponse = client().prepareSearch() @@ -388,11 +389,11 @@ public void testRescorerMadeScoresWorse() throws Exception { .setSettings(builder.put("index.number_of_shards", 1)) ); - client().prepareIndex("test", "type1", "3").setSource("field1", "massachusetts").get(); - client().prepareIndex("test", "type1", "6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); + client().prepareIndex("test").setId("3").setSource("field1", "massachusetts").get(); + client().prepareIndex("test").setId("6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); client().admin().indices().prepareRefresh("test").get(); - client().prepareIndex("test", "type1", "1").setSource("field1", "lexington massachusetts avenue").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "lexington avenue boston massachusetts road").get(); + client().prepareIndex("test").setId("1").setSource("field1", "lexington massachusetts avenue").get(); + client().prepareIndex("test").setId("2").setSource("field1", "lexington avenue boston massachusetts road").get(); client().admin().indices().prepareRefresh("test").get(); SearchResponse searchResponse = client().prepareSearch() @@ -538,9 +539,10 @@ public void testExplain() throws Exception { ) ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree").get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test").setId("1").setSource("field1", "the quick brown fox").get(); + client().prepareIndex("test").setId("2").setSource("field1", "the quick lazy huge brown fox jumps over the tree").get(); + client().prepareIndex("test") + 
.setId("3") .setSource("field1", "quick huge brown", "field2", "the quick lazy huge brown fox jumps over the tree") .get(); refresh(); @@ -800,7 +802,7 @@ private int indexRandomNumbers(String analyzer, int shards, boolean dummyDocs) t int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i)); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i)); } indexRandom(true, dummyDocs, docs); @@ -815,7 +817,7 @@ public void testFromSize() throws Exception { settings.put(SETTING_NUMBER_OF_REPLICAS, 0); assertAcked(prepareCreate("test").setSettings(settings)); for (int i = 0; i < 5; i++) { - client().prepareIndex("test", "type", "" + i).setSource("text", "hello world").get(); + client().prepareIndex("test").setId("" + i).setSource("text", "hello world").get(); } refresh(); @@ -831,7 +833,7 @@ public void testFromSize() throws Exception { public void testRescorePhaseWithInvalidSort() throws Exception { assertAcked(prepareCreate("test")); for (int i = 0; i < 5; i++) { - client().prepareIndex("test", "type", "" + i).setSource("number", 0).get(); + client().prepareIndex("test").setId("" + i).setSource("number", 0).get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java index ffdbdcdabec75..670f5e65eb575 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java @@ -63,6 +63,7 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -178,7 +179,8 @@ public void testScoreAccessWithinScript() throws Exception { int docCount = randomIntBetween(100, 200); for (int i = 0; i < docCount; i++) { - client().prepareIndex("test", "type", "" + i) + client().prepareIndex("test") + .setId("" + i) // we add 1 to the index field to make sure that the scripts below never compute log(0) .setSource("body", randomFrom(Arrays.asList("foo", "bar", "baz")), "index", i + 1) .get(); @@ -288,6 +290,37 @@ public void testSeedReportedInExplain() throws Exception { assertThat(firstHit.getExplanation().toString(), containsString("" + seed)); } + public void testSeedAndNameReportedInExplain() throws Exception { + createIndex("test"); + ensureGreen(); + index("test", "type", "1", jsonBuilder().startObject().endObject()); + flush(); + refresh(); + + int seed = 12345678; + + final String queryName = "query1"; + final String functionName = "func1"; + SearchResponse resp = client().prepareSearch("test") + .setQuery( + functionScoreQuery( + matchAllQuery().queryName(queryName), + randomFunction(functionName).seed(seed).setField(SeqNoFieldMapper.NAME) + ) + ) + .setExplain(true) + .get(); + assertNoFailures(resp); + assertEquals(1, resp.getHits().getTotalHits().value); + SearchHit firstHit = resp.getHits().getAt(0); + 
assertThat(firstHit.getExplanation().getDetails(), arrayWithSize(2)); + // "description": "*:* (_name: query1)" + assertThat(firstHit.getExplanation().getDetails()[0].getDescription().toString(), containsString("_name: " + queryName)); + assertThat(firstHit.getExplanation().getDetails()[1].getDetails(), arrayWithSize(2)); + // "description": "random score function (seed: 12345678, field: _seq_no, _name: func1)" + assertThat(firstHit.getExplanation().getDetails()[1].getDetails()[0].getDescription().toString(), containsString("seed: " + seed)); + } + public void testNoDocs() throws Exception { createIndex("test"); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java index ece21899fbea5..f865e56cd99e3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java @@ -72,7 +72,8 @@ public void testSimpleBoundingBoxTest() throws Exception { assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "New York") @@ -85,7 +86,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 5.286 km - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("name", "Times Square") @@ -98,7 +100,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 0.4621 km - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("name", "Tribeca") @@ -111,7 +114,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 1.055 km - client().prepareIndex("test", "type1", "4") + client().prepareIndex("test") + .setId("4") .setSource( jsonBuilder().startObject() .field("name", "Wall Street") @@ -124,7 +128,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 1.258 km - client().prepareIndex("test", "type1", "5") + client().prepareIndex("test") + .setId("5") .setSource( jsonBuilder().startObject() .field("name", "Soho") @@ -137,7 +142,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 2.029 km - client().prepareIndex("test", "type1", "6") + client().prepareIndex("test") + .setId("6") .setSource( jsonBuilder().startObject() .field("name", "Greenwich Village") @@ -150,7 +156,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 8.572 km - client().prepareIndex("test", "type1", "7") + client().prepareIndex("test") + .setId("7") .setSource( jsonBuilder().startObject() .field("name", "Brooklyn") @@ -196,7 +203,8 @@ public void testLimit2BoundingBox() throws Exception { assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("userid", 880) @@ -210,7 +218,8 @@ public void testLimit2BoundingBox() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") 
.setSource( jsonBuilder().startObject() .field("userid", 534) @@ -274,7 +283,8 @@ public void testCompleteLonRange() throws Exception { assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("userid", 880) @@ -288,7 +298,8 @@ public void testCompleteLonRange() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("userid", 534) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java index ceac97f25c8d6..d00c0a8c0faf7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java @@ -134,7 +134,8 @@ public void setupTestIndex() throws IOException { } public void testDistanceScript() throws Exception { - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "TestPosition") @@ -202,7 +203,8 @@ public void testDistanceScript() throws Exception { } public void testGeoDistanceAggregation() throws IOException { - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "TestPosition") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java index 478d018ed7fba..d899451660cb7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java @@ -249,7 +249,7 @@ public void testShapeRelations() throws Exception { ); BytesReference data = BytesReference.bytes(jsonBuilder().startObject().field("area", polygon).endObject()); - client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).get(); + client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); // Point in polygon @@ -312,7 +312,7 @@ public void testShapeRelations() throws Exception { ); data = BytesReference.bytes(jsonBuilder().startObject().field("area", inverse).endObject()); - client().prepareIndex("shapes", "polygon", "2").setSource(data, XContentType.JSON).get(); + client().prepareIndex("shapes").setId("2").setSource(data, XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); // re-check point on polygon hole @@ -351,7 +351,7 @@ public void testShapeRelations() throws Exception { ); data = BytesReference.bytes(jsonBuilder().startObject().field("area", builder).endObject()); - client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).get(); + client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); // Create a polygon crossing longitude 180 with hole. 
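The geo tests that follow repeat the same mechanical migration seen throughout this change: the deprecated type-aware prepareIndex overload is replaced by the typeless builder plus an explicit id. A hedged before/after sketch of just that pattern, with placeholder index, id, and field values:

```
// Before: type-aware overload (the removed lines in this diff)
// client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get();

// After: typeless builder with an explicit id (the added lines in this diff)
client().prepareIndex("test").setId("1").setSource("field1", "value1").get();

// Auto-generated ids follow the same shape: drop the type and simply omit setId()
client().prepareIndex("test").setSource("field1", "value1").get();
```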
@@ -364,7 +364,7 @@ public void testShapeRelations() throws Exception { ); data = BytesReference.bytes(jsonBuilder().startObject().field("area", builder).endObject()); - client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).get(); + client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); result = client().prepareSearch() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java index 42edaddee99a2..c2d75b6aa55af 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java @@ -73,7 +73,8 @@ protected void setupSuiteScopeCluster() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "New York") @@ -84,7 +85,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 5.286 km - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("name", "Times Square") @@ -95,7 +97,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 0.4621 km - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("name", "Tribeca") @@ -106,7 +109,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 1.055 km - client().prepareIndex("test", "type1", "4") + client().prepareIndex("test") + .setId("4") .setSource( jsonBuilder().startObject() .field("name", "Wall Street") @@ -117,7 +121,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 1.258 km - client().prepareIndex("test", "type1", "5") + client().prepareIndex("test") + .setId("5") .setSource( jsonBuilder().startObject() .field("name", "Soho") @@ -128,7 +133,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 2.029 km - client().prepareIndex("test", "type1", "6") + client().prepareIndex("test") + .setId("6") .setSource( jsonBuilder().startObject() .field("name", "Greenwich Village") @@ -139,7 +145,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 8.572 km - client().prepareIndex("test", "type1", "7") + client().prepareIndex("test") + .setId("7") .setSource( jsonBuilder().startObject() .field("name", "Brooklyn") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java index c37fe30c8311a..2db5973a2aa85 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java @@ -185,7 +185,7 @@ public void testIgnoreMalformed() throws Exception { .endObject() ); - indexRandom(true, client().prepareIndex("test", "geometry", "0").setSource("shape", polygonGeoJson)); + indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", polygonGeoJson)); SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } @@ -206,7 +206,7 @@ public void testMappingUpdate() throws Exception { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().admin().indices().preparePutMapping("test").setType("geometry").setSource(update, XContentType.JSON).get() + () -> client().admin().indices().preparePutMapping("test").setSource(update, XContentType.JSON).get() ); assertThat(e.getMessage(), containsString("using [BKD] strategy cannot be merged with")); } @@ -237,10 +237,10 @@ public void testIndexShapeRouting() throws Exception { + " }\n" + "}"; - indexRandom(true, client().prepareIndex("test", "doc", "0").setSource(source, XContentType.JSON).setRouting("ABC")); + indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON).setRouting("ABC")); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(geoShapeQuery("shape", "0", "doc").indexedShapeIndex("test").indexedShapeRouting("ABC")) + .setQuery(geoShapeQuery("shape", "0").indexedShapeIndex("test").indexedShapeRouting("ABC")) .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -273,8 +273,8 @@ public void testIndexPolygonDateLine() throws Exception { String source = "{\n" + " \"shape\" : \"POLYGON((179 0, -179 0, -179 2, 179 2, 179 0))\"" + "}"; - indexRandom(true, client().prepareIndex("quad", "doc", "0").setSource(source, XContentType.JSON)); - indexRandom(true, client().prepareIndex("vector", "doc", "0").setSource(source, XContentType.JSON)); + indexRandom(true, client().prepareIndex("quad").setId("0").setSource(source, XContentType.JSON)); + indexRandom(true, client().prepareIndex("vector").setId("0").setSource(source, XContentType.JSON)); try { ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java index 7f9f3c818f27f..479fd00e5e08b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java @@ -183,7 +183,7 @@ public void testIgnoreMalformed() throws Exception { .endObject() ); - indexRandom(true, client().prepareIndex("test", "geometry", "0").setSource("shape", polygonGeoJson)); + indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", polygonGeoJson)); SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } @@ -215,10 +215,10 @@ public void testIndexShapeRouting() throws Exception { + " }\n" + "}"; - indexRandom(true, client().prepareIndex("test", "doc", "0").setSource(source, XContentType.JSON).setRouting("ABC")); + indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON).setRouting("ABC")); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(geoShapeQuery("shape", "0", "doc").indexedShapeIndex("test").indexedShapeRouting("ABC")) + .setQuery(geoShapeQuery("shape", "0").indexedShapeIndex("test").indexedShapeRouting("ABC")) .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -238,7 +238,7 @@ public void testLegacyCircle() throws Exception { ); ensureGreen(); - 
indexRandom(true, client().prepareIndex("test", "_doc", "0").setSource("shape", (ToXContent) (builder, params) -> { + indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", (ToXContent) (builder, params) -> { builder.startObject() .field("type", "circle") .startArray("coordinates") @@ -267,7 +267,7 @@ public void testDisallowExpensiveQueries() throws InterruptedException, IOExcept ); ensureGreen(); - indexRandom(true, client().prepareIndex("test", "_doc").setId("0").setSource("shape", (ToXContent) (builder, params) -> { + indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", (ToXContent) (builder, params) -> { builder.startObject() .field("type", "circle") .startArray("coordinates") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java index 0a2ddb607ccc5..f5a2b76b89213 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java @@ -313,7 +313,8 @@ public void testMoreLikeThisIssue2197() throws Exception { XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() ); client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).get(); - client().prepareIndex("foo", "bar", "1") + client().prepareIndex("foo") + .setId("1") .setSource(jsonBuilder().startObject().startObject("foo").field("bar", "boz").endObject().endObject()) .get(); client().admin().indices().prepareRefresh("foo").get(); @@ -337,7 +338,8 @@ public void testMoreLikeWithCustomRouting() throws Exception { client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).get(); ensureGreen(); - client().prepareIndex("foo", "bar", "1") + client().prepareIndex("foo") + .setId("1") .setSource(jsonBuilder().startObject().startObject("foo").field("bar", "boz").endObject().endObject()) .setRouting("2") .get(); @@ -364,7 +366,8 @@ public void testMoreLikeThisIssueRoutingNotSerialized() throws Exception { ); ensureGreen(); - client().prepareIndex("foo", "bar", "1") + client().prepareIndex("foo") + .setId("1") .setSource(jsonBuilder().startObject().startObject("foo").field("bar", "boz").endObject().endObject()) .setRouting("4000") .get(); @@ -395,10 +398,12 @@ public void testNumericField() throws Exception { .endObject() ).get(); ensureGreen(); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("string_value", "lucene index").field("int_value", 1).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("string_value", "opensearch index").field("int_value", 42).endObject()) .get(); @@ -610,9 +615,9 @@ public void testSimpleMoreLikeThisIds() throws Exception { logger.info("Indexing..."); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("test", "type1").setSource("text", "lucene").setId("1")); - builders.add(client().prepareIndex("test", "type1").setSource("text", "lucene release").setId("2")); - builders.add(client().prepareIndex("test", "type1").setSource("text", "apache lucene").setId("3")); + builders.add(client().prepareIndex("test").setSource("text", "lucene").setId("1")); + 
builders.add(client().prepareIndex("test").setSource("text", "lucene release").setId("2")); + builders.add(client().prepareIndex("test").setSource("text", "apache lucene").setId("3")); indexRandom(true, builders); logger.info("Running MoreLikeThis"); @@ -637,10 +642,10 @@ public void testMoreLikeThisMultiValueFields() throws Exception { String[] values = { "aaaa", "bbbb", "cccc", "dddd", "eeee", "ffff", "gggg", "hhhh", "iiii", "jjjj" }; List builders = new ArrayList<>(values.length + 1); // index one document with all the values - builders.add(client().prepareIndex("test", "type1", "0").setSource("text", values)); + builders.add(client().prepareIndex("test").setId("0").setSource("text", values)); // index each document with only one of the values for (int i = 0; i < values.length; i++) { - builders.add(client().prepareIndex("test", "type1", String.valueOf(i + 1)).setSource("text", values[i])); + builders.add(client().prepareIndex("test").setId(String.valueOf(i + 1)).setSource("text", values[i])); } indexRandom(true, builders); @@ -674,7 +679,7 @@ public void testMinimumShouldMatch() throws ExecutionException, InterruptedExcep for (int j = 1; j <= 10 - i; j++) { text += j + " "; } - builders.add(client().prepareIndex("test", "type1", i + "").setSource("text", text)); + builders.add(client().prepareIndex("test").setId(i + "").setSource("text", text)); } indexRandom(true, builders); @@ -708,7 +713,7 @@ public void testMoreLikeThisArtificialDocs() throws Exception { doc.field("field" + i, generateRandomStringArray(5, 10, false) + "a"); // make sure they are not all empty } doc.endObject(); - indexRandom(true, client().prepareIndex("test", "type1", "0").setSource(doc)); + indexRandom(true, client().prepareIndex("test").setId("0").setSource(doc)); logger.info("Checking the document matches ..."); // routing to ensure we hit the shard with the doc @@ -737,7 +742,8 @@ public void testMoreLikeThisMalformedArtificialDocs() throws Exception { logger.info("Creating an index with a single document ..."); indexRandom( true, - client().prepareIndex("test", MapperService.SINGLE_MAPPING_NAME, "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("text", "Hello World!").field("date", "2009-01-01").endObject()) ); @@ -788,7 +794,7 @@ public void testMoreLikeThisUnlike() throws ExecutionException, InterruptedExcep logger.info("Indexing each field value of this document as a single document."); List builders = new ArrayList<>(); for (int i = 0; i < numFields; i++) { - builders.add(client().prepareIndex("test", "type1", i + "").setSource("field" + i, i + "")); + builders.add(client().prepareIndex("test").setId(i + "").setSource("field" + i, i + "")); } indexRandom(true, builders); @@ -826,9 +832,11 @@ public void testSelectFields() throws IOException, ExecutionException, Interrupt indexRandom( true, - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("text", "hello world").field("text1", "opensearch").endObject()), - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("text", "goodby moon").field("text1", "opensearch").endObject()) ); @@ -850,9 +858,9 @@ public void testSelectFields() throws IOException, ExecutionException, Interrupt } public void testWithRouting() throws IOException { - client().prepareIndex("index", "type", "1").setRouting("3").setSource("text", "this is a document").get(); - 
client().prepareIndex("index", "type", "2").setRouting("1").setSource("text", "this is another document").get(); - client().prepareIndex("index", "type", "3").setRouting("4").setSource("text", "this is yet another document").get(); + client().prepareIndex("index").setId("1").setRouting("3").setSource("text", "this is a document").get(); + client().prepareIndex("index").setId("2").setRouting("1").setSource("text", "this is another document").get(); + client().prepareIndex("index").setId("3").setRouting("4").setSource("text", "this is yet another document").get(); refresh("index"); Item item = new Item("index", "2").routing("1"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java index 8f374793ea8bf..8226663abf49e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java @@ -49,8 +49,8 @@ public class MultiSearchIT extends OpenSearchIntegTestCase { public void testSimpleMultiSearch() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", "xxx").get(); - client().prepareIndex("test", "type", "2").setSource("field", "yyy").get(); + client().prepareIndex("test").setId("1").setSource("field", "xxx").get(); + client().prepareIndex("test").setId("2").setSource("field", "yyy").get(); refresh(); MultiSearchResponse response = client().prepareMultiSearch() .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "xxx"))) @@ -73,7 +73,7 @@ public void testSimpleMultiSearchMoreRequests() { createIndex("test"); int numDocs = randomIntBetween(0, 16); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java index dac0a5d01b516..e4ad46c7599fe 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java @@ -82,7 +82,8 @@ public void testSimpleNested() throws Exception { searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", "value1") @@ -133,7 +134,8 @@ public void testSimpleNested() throws Exception { // add another doc, one that would match if it was not nested... - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("field1", "value1") @@ -196,7 +198,7 @@ public void testSimpleNested() throws Exception { assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); // check delete, so all is gone... 
- DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "2").get(); + DeleteResponse deleteResponse = client().prepareDelete("test", "2").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); refresh(); @@ -231,7 +233,8 @@ public void testMultiNested() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field", "value") @@ -389,7 +392,8 @@ public void testDeleteNestedDocsWithAlias() throws Exception { ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", "value1") @@ -407,7 +411,8 @@ public void testDeleteNestedDocsWithAlias() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("field1", "value2") @@ -448,7 +453,8 @@ public void testExplain() throws Exception { ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", "value1") @@ -500,7 +506,8 @@ public void testSimpleNestedSorting() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", 1) @@ -515,7 +522,8 @@ public void testSimpleNestedSorting() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("field1", 2) @@ -530,7 +538,8 @@ public void testSimpleNestedSorting() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("field1", 3) @@ -600,7 +609,8 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", 1) @@ -617,7 +627,8 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("field1", 2) @@ -636,7 +647,8 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { .get(); // Doc with missing nested docs if nested filter is used refresh(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("field1", 3) @@ -739,7 +751,8 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( "{\n" + " \"acl\": [\n" @@ -793,7 +806,8 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( "{\n" + " \"acl\": [\n" @@ -979,7 +993,8 @@ public void testLeakingSortValues() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "test-type", "1") + client().prepareIndex("test") + .setId("1") .setSource( "{\n" + " \"nested1\": 
[\n" @@ -997,7 +1012,8 @@ public void testLeakingSortValues() throws Exception { ) .get(); - client().prepareIndex("test", "test-type", "2") + client().prepareIndex("test") + .setId("2") .setSource( "{\n" + " \"nested1\": [\n" @@ -1071,7 +1087,8 @@ public void testSortNestedWithNestedFilter() throws Exception { ensureGreen(); // sum: 11 - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("grand_parent_values", 1L) @@ -1113,7 +1130,8 @@ public void testSortNestedWithNestedFilter() throws Exception { .get(); // sum: 7 - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("grand_parent_values", 2L) @@ -1155,7 +1173,8 @@ public void testSortNestedWithNestedFilter() throws Exception { .get(); // sum: 2 - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("grand_parent_values", 3L) @@ -1469,7 +1488,8 @@ public void testNestedSortingWithNestedFilterAsFilter() throws Exception { ) ); - IndexResponse indexResponse1 = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse1 = client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("officelocation", "gendale") @@ -1522,7 +1542,8 @@ public void testNestedSortingWithNestedFilterAsFilter() throws Exception { .get(); assertTrue(indexResponse1.getShardInfo().getSuccessful() > 0); - IndexResponse indexResponse2 = client().prepareIndex("test", "type", "2") + IndexResponse indexResponse2 = client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("officelocation", "gendale") @@ -1603,8 +1624,8 @@ public void testCheckFixedBitSetCache() throws Exception { } assertAcked(prepareCreate("test").setSettings(settingsBuilder).addMapping("type")); - client().prepareIndex("test", "type", "0").setSource("field", "value").get(); - client().prepareIndex("test", "type", "1").setSource("field", "value").get(); + client().prepareIndex("test").setId("0").setSource("field", "value").get(); + client().prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); ensureSearchable("test"); @@ -1613,7 +1634,7 @@ public void testCheckFixedBitSetCache() throws Exception { assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0L)); // Now add nested mapping - assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource("array1", "type=nested")); + assertAcked(client().admin().indices().preparePutMapping("test").setSource("array1", "type=nested")); XContentBuilder builder = jsonBuilder().startObject() .startArray("array1") @@ -1623,11 +1644,11 @@ public void testCheckFixedBitSetCache() throws Exception { .endArray() .endObject(); // index simple data - client().prepareIndex("test", "type", "2").setSource(builder).get(); - client().prepareIndex("test", "type", "3").setSource(builder).get(); - client().prepareIndex("test", "type", "4").setSource(builder).get(); - client().prepareIndex("test", "type", "5").setSource(builder).get(); - client().prepareIndex("test", "type", "6").setSource(builder).get(); + client().prepareIndex("test").setId("2").setSource(builder).get(); + client().prepareIndex("test").setId("3").setSource(builder).get(); + client().prepareIndex("test").setId("4").setSource(builder).get(); + 
client().prepareIndex("test").setId("5").setSource(builder).get(); + client().prepareIndex("test").setId("6").setSource(builder).get(); refresh(); ensureSearchable("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java index fc80cb848f306..c69555d00170b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java @@ -83,7 +83,7 @@ public void testStopOneNodePreferenceWithRedState() throws IOException { ); ensureGreen(); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", "" + i).setSource("field1", "value1").get(); + client().prepareIndex("test").setId("" + i).setSource("field1", "value1").get(); } refresh(); internalCluster().stopRandomDataNode(); @@ -121,7 +121,7 @@ public void testNoPreferenceRandom() { ); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "value1").get(); + client().prepareIndex("test").setSource("field1", "value1").get(); refresh(); final Client client = internalCluster().smartClient(); @@ -137,7 +137,7 @@ public void testSimplePreference() { client().admin().indices().prepareCreate("test").setSettings("{\"number_of_replicas\": 1}", XContentType.JSON).get(); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "value1").get(); + client().prepareIndex("test").setSource("field1", "value1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).get(); @@ -170,7 +170,7 @@ public void testNodesOnlyRandom() { ) ); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "value1").get(); + client().prepareIndex("test").setSource("field1", "value1").get(); refresh(); final Client client = internalCluster().smartClient(); @@ -240,7 +240,7 @@ public void testCustomPreferenceUnaffectedByOtherShardMovements() { ) ); ensureGreen(); - client().prepareIndex("test", "_doc").setSource("field1", "value1").get(); + client().prepareIndex("test").setSource("field1", "value1").get(); refresh(); final String customPreference = randomAlphaOfLength(10); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java index 2090ddc0a083b..24885c1f853ef 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java @@ -35,7 +35,7 @@ public void testProfilerNetworkTime() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } List stringFields = Arrays.asList("field1"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java index 
1c3e5e03a2f25..7d63db78e205a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java @@ -120,7 +120,7 @@ protected void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < 5; i++) { builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(STRING_FIELD, randomFrom(randomStrings)) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java index 589c30894e3d1..65d3ee2779de8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java @@ -67,7 +67,7 @@ public void testProfileQuery() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } List stringFields = Arrays.asList("field1"); @@ -121,7 +121,8 @@ public void testProfileMatchesRegular() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)) + docs[i] = client().prepareIndex("test") + .setId(String.valueOf(i)) .setSource("id", String.valueOf(i), "field1", English.intToEnglish(i), "field2", i); } @@ -199,7 +200,7 @@ public void testSimpleMatch() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -239,7 +240,7 @@ public void testBool() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -299,7 +300,7 @@ public void testEmptyBool() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -342,7 +343,7 @@ public void testCollapsingBool() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new 
IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -382,7 +383,7 @@ public void testBoosting() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -422,7 +423,7 @@ public void testDisMaxRange() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -462,7 +463,7 @@ public void testRange() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -501,7 +502,8 @@ public void testPhrase() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)) + docs[i] = client().prepareIndex("test") + .setId(String.valueOf(i)) .setSource("field1", English.intToEnglish(i) + " " + English.intToEnglish(i + 1), "field2", i); } @@ -556,7 +558,7 @@ public void testNoProfile() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java index 2b2a36ea76d8a..89c614485b620 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java @@ -113,7 +113,7 @@ public void testExists() throws Exception { emptyMap() }; List reqs = new ArrayList<>(); for (Map source : sources) { - reqs.add(client().prepareIndex("idx", "type").setSource(source)); + reqs.add(client().prepareIndex("idx").setSource(source)); } // We do NOT index dummy documents, otherwise the type for these dummy documents // would have _field_names indexed while the current type might not which 
might @@ -200,11 +200,11 @@ public void testFieldAlias() throws Exception { ensureGreen("idx"); List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); - indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); - indexRequests.add(client().prepareIndex("idx", "type").setSource("bar", 3)); - indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", singletonMap("bar", 2.718))); - indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", singletonMap("bar", 6.283))); + indexRequests.add(client().prepareIndex("idx").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx").setSource("bar", 3)); + indexRequests.add(client().prepareIndex("idx").setSource("foo", singletonMap("bar", 2.718))); + indexRequests.add(client().prepareIndex("idx").setSource("foo", singletonMap("bar", 6.283))); indexRandom(true, false, indexRequests); Map expected = new LinkedHashMap<>(); @@ -243,10 +243,10 @@ public void testFieldAliasWithNoDocValues() throws Exception { ensureGreen("idx"); List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); - indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); - indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", 3)); - indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", 43)); + indexRequests.add(client().prepareIndex("idx").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx").setSource("foo", 3)); + indexRequests.add(client().prepareIndex("idx").setSource("foo", 43)); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("idx").setQuery(QueryBuilders.existsQuery("foo-alias")).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java index 0bf0c05617efd..b40a034fc2c92 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java @@ -114,7 +114,8 @@ public void init() throws Exception { int numDocs = scaledRandomIntBetween(50, 100); List builders = new ArrayList<>(); builders.add( - client().prepareIndex("test", "test", "theone") + client().prepareIndex("test") + .setId("theone") .setSource( "id", "theone", @@ -133,7 +134,8 @@ public void init() throws Exception { ) ); builders.add( - client().prepareIndex("test", "test", "theother") + client().prepareIndex("test") + .setId("theother") .setSource( "id", "theother", @@ -151,7 +153,8 @@ public void init() throws Exception { ); builders.add( - client().prepareIndex("test", "test", "ultimate1") + client().prepareIndex("test") + .setId("ultimate1") .setSource( "id", "ultimate1", @@ -168,7 +171,8 @@ public void init() throws Exception { ) ); builders.add( - client().prepareIndex("test", "test", "ultimate2") + client().prepareIndex("test") + .setId("ultimate2") .setSource( "full_name", "Man the Ultimate Ninja", @@ -184,7 +188,8 @@ public void init() throws Exception { ); builders.add( - client().prepareIndex("test", "test", "anotherhero") + client().prepareIndex("test") + .setId("anotherhero") .setSource( "id", 
"anotherhero", @@ -202,7 +207,8 @@ public void init() throws Exception { ); builders.add( - client().prepareIndex("test", "test", "nowHero") + client().prepareIndex("test") + .setId("nowHero") .setSource( "id", "nowHero", @@ -229,7 +235,8 @@ public void init() throws Exception { String first = RandomPicks.randomFrom(random(), firstNames); String last = randomPickExcept(lastNames, first); builders.add( - client().prepareIndex("test", "test", "" + i) + client().prepareIndex("test") + .setId("" + i) .setSource( "id", i, @@ -1013,8 +1020,8 @@ public void testFuzzyFieldLevelBoosting() throws InterruptedException, Execution assertAcked(builder.addMapping("type", "title", "type=text", "body", "type=text")); ensureGreen(); List builders = new ArrayList<>(); - builders.add(client().prepareIndex(idx, "type", "1").setSource("title", "foo", "body", "bar")); - builders.add(client().prepareIndex(idx, "type", "2").setSource("title", "bar", "body", "foo")); + builders.add(client().prepareIndex(idx).setId("1").setSource("title", "foo", "body", "bar")); + builders.add(client().prepareIndex(idx).setId("2").setSource("title", "bar", "body", "foo")); indexRandom(true, false, builders); SearchResponse searchResponse = client().prepareSearch(idx) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java index 248754451e1bb..5b2d87a6508fe 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java @@ -90,9 +90,9 @@ protected Settings nodeSettings(int nodeOrdinal) { public void testBasicAllQuery() throws Exception { List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f1", "foo bar baz")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f2", "Bar")); - reqs.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "foo bar baz")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f1", "foo bar baz")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f2", "Bar")); + reqs.add(client().prepareIndex("test").setId("3").setSource("f3", "foo bar baz")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo")).get(); @@ -110,8 +110,8 @@ public void testBasicAllQuery() throws Exception { public void testWithDate() throws Exception { List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f1", "foo", "f_date", "2015/09/02")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f1", "bar", "f_date", "2015/09/01")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f1", "foo", "f_date", "2015/09/02")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo bar")).get(); @@ -134,10 +134,10 @@ public void testWithDate() throws Exception { public void testWithLotsOfTypes() throws Exception { List reqs = new ArrayList<>(); reqs.add( - client().prepareIndex("test", "_doc", "1").setSource("f1", "foo", "f_date", "2015/09/02", "f_float", "1.7", "f_ip", "127.0.0.1") + client().prepareIndex("test").setId("1").setSource("f1", "foo", "f_date", "2015/09/02", "f_float", "1.7", "f_ip", "127.0.0.1") ); reqs.add( - 
client().prepareIndex("test", "_doc", "2").setSource("f1", "bar", "f_date", "2015/09/01", "f_float", "1.8", "f_ip", "127.0.0.2") + client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01", "f_float", "1.8", "f_ip", "127.0.0.2") ); indexRandom(true, false, reqs); @@ -161,7 +161,7 @@ public void testWithLotsOfTypes() throws Exception { public void testDocWithAllTypes() throws Exception { List reqs = new ArrayList<>(); String docBody = copyToStringFromClasspath("/org/opensearch/search/query/all-example-document.json"); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource(docBody, XContentType.JSON)); + reqs.add(client().prepareIndex("test").setId("1").setSource(docBody, XContentType.JSON)); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo")).get(); @@ -198,9 +198,9 @@ public void testDocWithAllTypes() throws Exception { public void testKeywordWithWhitespace() throws Exception { List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f2", "Foo Bar")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f1", "bar")); - reqs.add(client().prepareIndex("test", "_doc", "3").setSource("f1", "foo bar")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f2", "Foo Bar")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar")); + reqs.add(client().prepareIndex("test").setId("3").setSource("f1", "foo bar")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo")).get(); @@ -224,7 +224,7 @@ public void testAllFields() throws Exception { ensureGreen("test_1"); List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test_1", "_doc", "1").setSource("f1", "foo", "f2", "eggplant")); + reqs.add(client().prepareIndex("test_1").setId("1").setSource("f1", "foo", "f2", "eggplant")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test_1") @@ -239,8 +239,8 @@ public void testAllFields() throws Exception { public void testPhraseQueryOnFieldWithNoPositions() throws Exception { List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f1", "foo bar", "f4", "eggplant parmesan")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f1", "foo bar", "f4", "chicken parmesan")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f1", "foo bar", "f4", "eggplant parmesan")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "foo bar", "f4", "chicken parmesan")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("\"eggplant parmesan\"").lenient(true)).get(); @@ -289,7 +289,7 @@ public void testLimitOnExpandedFieldsButIgnoreUnmappedFields() throws Exception assertAcked(prepareCreate("ignoreunmappedfields").addMapping("_doc", builder)); - client().prepareIndex("ignoreunmappedfields", "_doc", "1").setSource("field1", "foo bar baz").get(); + client().prepareIndex("ignoreunmappedfields").setId("1").setSource("field1", "foo bar baz").get(); refresh(); QueryStringQueryBuilder qb = queryStringQuery("bar"); @@ -324,7 +324,7 @@ public void testLimitOnExpandedFields() throws Exception { ).addMapping("_doc", builder) ); - client().prepareIndex("testindex", "_doc", "1").setSource("field_A0", "foo bar baz").get(); + client().prepareIndex("testindex").setId("1").setSource("field_A0", "foo bar baz").get(); 
refresh(); // single field shouldn't trigger the limit @@ -375,9 +375,9 @@ private void doAssertLimitExceededException(String queryString, int exceedingFie public void testFieldAlias() throws Exception { List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); - indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); - indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("value").field("f3_alias")).get(); @@ -389,9 +389,9 @@ public void testFieldAlias() throws Exception { public void testFieldAliasWithEmbeddedFieldNames() throws Exception { List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); - indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); - indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("f3_alias:value AND f2:three")).get(); @@ -403,9 +403,9 @@ public void testFieldAliasWithEmbeddedFieldNames() throws Exception { public void testFieldAliasWithWildcardField() throws Exception { List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); - indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); - indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("value").field("f3_*")).get(); @@ -417,7 +417,7 @@ public void testFieldAliasWithWildcardField() throws Exception { public void testFieldAliasOnDisallowedFieldType() throws Exception { List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); indexRandom(true, false, indexRequests); // The wildcard field matches aliases for both a text and geo_point field. 
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java index c36b0f0fedbc9..59f9e020df0d9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java @@ -91,7 +91,7 @@ public void testScriptScore() { assertAcked(prepareCreate("test-index").addMapping("_doc", "field1", "type=text", "field2", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { - client().prepareIndex("test-index", "_doc", "" + i).setSource("field1", "text" + (i % 2), "field2", i).get(); + client().prepareIndex("test-index").setId("" + i).setSource("field1", "text" + (i % 2), "field2", i).get(); } refresh(); @@ -117,7 +117,7 @@ public void testScriptScoreBoolQuery() { assertAcked(prepareCreate("test-index").addMapping("_doc", "field1", "type=text", "field2", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { - client().prepareIndex("test-index", "_doc", "" + i).setSource("field1", "text" + i, "field2", i).get(); + client().prepareIndex("test-index").setId("" + i).setSource("field1", "text" + i, "field2", i).get(); } refresh(); @@ -138,9 +138,9 @@ public void testRewrittenQuery() { prepareCreate("test-index2").setSettings(Settings.builder().put("index.number_of_shards", 1)) .addMapping("_doc", "field1", "type=date", "field2", "type=double") ); - client().prepareIndex("test-index2", "_doc", "1").setSource("field1", "2019-09-01", "field2", 1).get(); - client().prepareIndex("test-index2", "_doc", "2").setSource("field1", "2019-10-01", "field2", 2).get(); - client().prepareIndex("test-index2", "_doc", "3").setSource("field1", "2019-11-01", "field2", 3).get(); + client().prepareIndex("test-index2").setId("1").setSource("field1", "2019-09-01", "field2", 1).get(); + client().prepareIndex("test-index2").setId("2").setSource("field1", "2019-10-01", "field2", 2).get(); + client().prepareIndex("test-index2").setId("3").setSource("field1", "2019-11-01", "field2", 3).get(); refresh(); RangeQueryBuilder rangeQB = new RangeQueryBuilder("field1").from("2019-01-01"); // the query should be rewritten to from:null @@ -157,7 +157,7 @@ public void testDisallowExpensiveQueries() { assertAcked(prepareCreate("test-index").addMapping("_doc", "field1", "type=text", "field2", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { - client().prepareIndex("test-index", "_doc").setId("" + i).setSource("field1", "text" + (i % 2), "field2", i).get(); + client().prepareIndex("test-index").setId("" + i).setSource("field1", "text" + (i % 2), "field2", i).get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index dc0bcafa43f37..69a8fa138d1d6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -163,9 +163,9 @@ public void testEmptyQueryString() throws ExecutionException, InterruptedExcepti createIndex("test"); indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox jumps"), - client().prepareIndex("test", "type1", "2").setSource("field1", "quick brown"), - client().prepareIndex("test", "type1", 
"3").setSource("field1", "quick") + client().prepareIndex("test").setId("1").setSource("field1", "the quick brown fox jumps"), + client().prepareIndex("test").setId("2").setSource("field1", "quick brown"), + client().prepareIndex("test").setId("3").setSource("field1", "quick") ); assertHitCount(client().prepareSearch().setQuery(queryStringQuery("quick")).get(), 3L); @@ -175,9 +175,9 @@ public void testEmptyQueryString() throws ExecutionException, InterruptedExcepti // see https://github.com/elastic/elasticsearch/issues/3177 public void testIssue3177() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value2").get(); - client().prepareIndex("test", "type1", "3").setSource("field1", "value3").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("2").setSource("field1", "value2").get(); + client().prepareIndex("test").setId("3").setSource("field1", "value3").get(); ensureGreen(); waitForRelocation(); forceMerge(); @@ -214,9 +214,8 @@ public void testIndexOptions() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text,index_options=docs")); indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), - client().prepareIndex("test", "type1", "2") - .setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox") + client().prepareIndex("test").setId("1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), + client().prepareIndex("test").setId("2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox") ); SearchResponse searchResponse = client().prepareSearch().setQuery(matchPhraseQuery("field2", "quick brown").slop(0)).get(); @@ -235,9 +234,8 @@ public void testConstantScoreQuery() throws Exception { createIndex("test"); indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), - client().prepareIndex("test", "type1", "2") - .setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox") + client().prepareIndex("test").setId("1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), + client().prepareIndex("test").setId("2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox") ); SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("field1", "quick"))).get(); @@ -279,7 +277,7 @@ public void testConstantScoreQuery() throws Exception { int num = scaledRandomIntBetween(100, 200); IndexRequestBuilder[] builders = new IndexRequestBuilder[num]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test_1", "type", "" + i).setSource("f", English.intToEnglish(i)); + builders[i] = client().prepareIndex("test_1").setId("" + i).setSource("f", English.intToEnglish(i)); } createIndex("test_1"); indexRandom(true, builders); @@ -316,8 +314,8 @@ public void testAllDocsQueryString() throws InterruptedException, ExecutionExcep createIndex("test"); indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("foo", "bar"), - client().prepareIndex("test", "type1", "2").setSource("foo", "bar") + client().prepareIndex("test").setId("1").setSource("foo", "bar"), + 
client().prepareIndex("test").setId("2").setSource("foo", "bar") ); int iters = scaledRandomIntBetween(100, 200); @@ -344,10 +342,11 @@ public void testCommonTermsQuery() throws Exception { .get(); indexRandom( true, - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource("field1", "quick lazy huge brown pidgin", "field2", "the quick lazy huge brown fox jumps over the tree"), - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox"), - client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree") + client().prepareIndex("test").setId("1").setSource("field1", "the quick brown fox"), + client().prepareIndex("test").setId("2").setSource("field1", "the quick lazy huge brown fox jumps over the tree") ); SearchResponse searchResponse = client().prepareSearch() @@ -441,7 +440,7 @@ public void testCommonTermsQuery() throws Exception { public void testQueryStringAnalyzedWildcard() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value_1", "field2", "value_2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value_1", "field2", "value_2").get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("value*")).get(); @@ -463,7 +462,7 @@ public void testQueryStringAnalyzedWildcard() throws Exception { public void testLowercaseExpandedTerms() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value_1", "field2", "value_2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value_1", "field2", "value_2").get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("VALUE_3~1")).get(); @@ -485,7 +484,7 @@ public void testDateRangeInQueryString() { ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1)); String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1)); - client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); + client().prepareIndex("test").setId("1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get(); @@ -511,7 +510,7 @@ public void testDateRangeInQueryStringWithTimeZone_7880() { ZoneId timeZone = randomZone(); String now = DateFormatter.forPattern("strict_date_optional_time").format(Instant.now().atZone(timeZone)); logger.info(" --> Using time_zone [{}], now is [{}]", timeZone.getId(), now); - client().prepareIndex("test", "type", "1").setSource("past", now).get(); + client().prepareIndex("test").setId("1").setSource("past", now).get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -526,8 +525,8 @@ public void testDateRangeInQueryStringWithTimeZone_10477() { // as with dynamic mappings some shards might be lacking behind and parse a different query assertAcked(prepareCreate("test").addMapping("type", "past", "type=date")); - client().prepareIndex("test", "type", "1").setSource("past", "2015-04-05T23:00:00+0000").get(); - client().prepareIndex("test", "type", "2").setSource("past", "2015-04-06T00:00:00+0000").get(); + client().prepareIndex("test").setId("1").setSource("past", "2015-04-05T23:00:00+0000").get(); + 
client().prepareIndex("test").setId("2").setSource("past", "2015-04-06T00:00:00+0000").get(); refresh(); // Timezone set with dates @@ -560,9 +559,9 @@ public void testIdsQueryTestsIdIndexed() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value2"), - client().prepareIndex("test", "type1", "3").setSource("field1", "value3") + client().prepareIndex("test").setId("1").setSource("field1", "value1"), + client().prepareIndex("test").setId("2").setSource("field1", "value2"), + client().prepareIndex("test").setId("3").setSource("field1", "value3") ); SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().addIds("1", "3"))).get(); @@ -587,7 +586,7 @@ public void testTermIndexQuery() throws Exception { for (String indexName : indexNames) { assertAcked(client().admin().indices().prepareCreate(indexName)); - indexRandom(true, client().prepareIndex(indexName, "type1", indexName + "1").setSource("field1", "value1")); + indexRandom(true, client().prepareIndex(indexName).setId(indexName + "1").setSource("field1", "value1")); } @@ -621,7 +620,8 @@ public void testFilterExistsMissing() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startObject("obj1") @@ -632,7 +632,8 @@ public void testFilterExistsMissing() throws Exception { .field("field2", "value2_1") .endObject() ), - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .startObject("obj1") @@ -642,7 +643,8 @@ public void testFilterExistsMissing() throws Exception { .field("field1", "value1_2") .endObject() ), - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .startObject("obj2") @@ -652,7 +654,8 @@ public void testFilterExistsMissing() throws Exception { .field("field2", "value2_3") .endObject() ), - client().prepareIndex("test", "type1", "4") + client().prepareIndex("test") + .setId("4") .setSource( jsonBuilder().startObject() .startObject("obj2") @@ -698,7 +701,7 @@ public void testFilterExistsMissing() throws Exception { public void testPassQueryOrFilterAsJSONString() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1_1", "field2", "value2_1").setRefreshPolicy(IMMEDIATE).get(); WrapperQueryBuilder wrapper = new WrapperQueryBuilder("{ \"term\" : { \"field1\" : \"value1_1\" } }"); assertHitCount(client().prepareSearch().setQuery(wrapper).get(), 1L); @@ -713,7 +716,7 @@ public void testPassQueryOrFilterAsJSONString() throws Exception { public void testFiltersWithCustomCacheKey() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))).get(); assertHitCount(searchResponse, 1L); @@ -733,9 +736,9 @@ public void testMatchQueryNumeric() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("long", 
1L, "double", 1.0d), - client().prepareIndex("test", "type1", "2").setSource("long", 2L, "double", 2.0d), - client().prepareIndex("test", "type1", "3").setSource("long", 3L, "double", 3.0d) + client().prepareIndex("test").setId("1").setSource("long", 1L, "double", 1.0d), + client().prepareIndex("test").setId("2").setSource("long", 2L, "double", 2.0d), + client().prepareIndex("test").setId("3").setSource("long", 3L, "double", 3.0d) ); SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("long", "1")).get(); @@ -753,8 +756,8 @@ public void testMatchQueryFuzzy() throws Exception { indexRandom( true, - client().prepareIndex("test", "_doc", "1").setSource("text", "Unit"), - client().prepareIndex("test", "_doc", "2").setSource("text", "Unity") + client().prepareIndex("test").setId("1").setSource("text", "Unit"), + client().prepareIndex("test").setId("2").setSource("text", "Unity") ); SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness("0")).get(); @@ -781,9 +784,9 @@ public void testMultiMatchQuery() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value4", "field3", "value3"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value2", "field2", "value5", "field3", "value2"), - client().prepareIndex("test", "type1", "3").setSource("field1", "value3", "field2", "value6", "field3", "value1") + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value4", "field3", "value3"), + client().prepareIndex("test").setId("2").setSource("field1", "value2", "field2", "value5", "field3", "value2"), + client().prepareIndex("test").setId("3").setSource("field1", "value3", "field2", "value6", "field3", "value1") ); MultiMatchQueryBuilder builder = multiMatchQuery("value1 value2 value4", "field1", "field2"); @@ -825,7 +828,7 @@ public void testMultiMatchQuery() throws Exception { assertSearchHits(searchResponse, "3", "1"); // Test lenient - client().prepareIndex("test", "type1", "3").setSource("field1", "value7", "field2", "value8", "field4", 5).get(); + client().prepareIndex("test").setId("3").setSource("field1", "value7", "field2", "value8", "field4", 5).get(); refresh(); builder = multiMatchQuery("value1", "field1", "field2", "field4"); @@ -846,8 +849,8 @@ public void testMatchQueryZeroTermsQuery() { assertAcked( prepareCreate("test").addMapping("type1", "field1", "type=text,analyzer=classic", "field2", "type=text,analyzer=classic") ); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("2").setSource("field1", "value2").get(); refresh(); BoolQueryBuilder boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE)) @@ -869,8 +872,8 @@ public void testMultiMatchQueryZeroTermsQuery() { assertAcked( prepareCreate("test").addMapping("type1", "field1", "type=text,analyzer=classic", "field2", "type=text,analyzer=classic") ); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value3", "field2", "value4").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); + 
client().prepareIndex("test").setId("2").setSource("field1", "value3", "field2", "value4").get(); refresh(); BoolQueryBuilder boolQuery = boolQuery().must( @@ -893,8 +896,8 @@ public void testMultiMatchQueryZeroTermsQuery() { public void testMultiMatchQueryMinShouldMatch() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", new String[] { "value1", "value2", "value3" }).get(); - client().prepareIndex("test", "type1", "2").setSource("field2", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", new String[] { "value1", "value2", "value3" }).get(); + client().prepareIndex("test").setId("2").setSource("field2", "value1").get(); refresh(); MultiMatchQueryBuilder multiMatchQuery = multiMatchQuery("value1 value2 foo", "field1", "field2"); @@ -939,8 +942,8 @@ public void testMultiMatchQueryMinShouldMatch() { public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws IOException { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", new String[] { "value1", "value2", "value3" }).get(); - client().prepareIndex("test", "type1", "2").setSource("field2", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", new String[] { "value1", "value2", "value3" }).get(); + client().prepareIndex("test").setId("2").setSource("field2", "value1").get(); refresh(); BoolQueryBuilder boolQuery = boolQuery().must(termQuery("field1", "value1")) @@ -971,8 +974,8 @@ public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws public void testFuzzyQueryString() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("str", "foobar", "date", "2012-02-01", "num", 12).get(); - client().prepareIndex("test", "type1", "2").setSource("str", "fred", "date", "2012-02-05", "num", 20).get(); + client().prepareIndex("test").setId("1").setSource("str", "foobar", "date", "2012-02-01", "num", 12).get(); + client().prepareIndex("test").setId("2").setSource("str", "fred", "date", "2012-02-05", "num", 20).get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("str:foobaz~1")).get(); @@ -989,8 +992,8 @@ public void testQuotedQueryStringWithBoost() throws InterruptedException { indexRandom( true, false, - client().prepareIndex("test", "type1", "1").setSource("important", "phrase match", "less_important", "nothing important"), - client().prepareIndex("test", "type1", "2").setSource("important", "nothing important", "less_important", "phrase match") + client().prepareIndex("test").setId("1").setSource("important", "phrase match", "less_important", "nothing important"), + client().prepareIndex("test").setId("2").setSource("important", "nothing important", "less_important", "phrase match") ); SearchResponse searchResponse = client().prepareSearch() @@ -1007,8 +1010,8 @@ public void testQuotedQueryStringWithBoost() throws InterruptedException { public void testSpecialRangeSyntaxInQueryString() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("str", "foobar", "date", "2012-02-01", "num", 12).get(); - client().prepareIndex("test", "type1", "2").setSource("str", "fred", "date", "2012-02-05", "num", 20).get(); + client().prepareIndex("test").setId("1").setSource("str", "foobar", "date", "2012-02-01", "num", 12).get(); + client().prepareIndex("test").setId("2").setSource("str", "fred", "date", "2012-02-05", "num", 20).get(); refresh(); SearchResponse searchResponse = 
client().prepareSearch().setQuery(queryStringQuery("num:>19")).get(); @@ -1040,10 +1043,10 @@ public void testEmptytermsQuery() throws Exception { indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("term", "1"), - client().prepareIndex("test", "type", "2").setSource("term", "2"), - client().prepareIndex("test", "type", "3").setSource("term", "3"), - client().prepareIndex("test", "type", "4").setSource("term", "4") + client().prepareIndex("test").setId("1").setSource("term", "1"), + client().prepareIndex("test").setId("2").setSource("term", "2"), + client().prepareIndex("test").setId("3").setSource("term", "3"), + client().prepareIndex("test").setId("4").setSource("term", "4") ); SearchResponse searchResponse = client().prepareSearch("test") @@ -1060,10 +1063,10 @@ public void testTermsQuery() throws Exception { indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("str", "1", "lng", 1L, "dbl", 1.0d), - client().prepareIndex("test", "type", "2").setSource("str", "2", "lng", 2L, "dbl", 2.0d), - client().prepareIndex("test", "type", "3").setSource("str", "3", "lng", 3L, "dbl", 3.0d), - client().prepareIndex("test", "type", "4").setSource("str", "4", "lng", 4L, "dbl", 4.0d) + client().prepareIndex("test").setId("1").setSource("str", "1", "lng", 1L, "dbl", 1.0d), + client().prepareIndex("test").setId("2").setSource("str", "2", "lng", 2L, "dbl", 2.0d), + client().prepareIndex("test").setId("3").setSource("str", "3", "lng", 3L, "dbl", 3.0d), + client().prepareIndex("test").setId("4").setSource("str", "4", "lng", 4L, "dbl", 4.0d) ); SearchResponse searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("str", "1", "4"))).get(); @@ -1138,11 +1141,12 @@ public void testTermsLookupFilter() throws Exception { indexRandom( true, - client().prepareIndex("lookup", "type", "1").setSource("terms", new String[] { "1", "3" }), - client().prepareIndex("lookup", "type", "2").setSource("terms", new String[] { "2" }), - client().prepareIndex("lookup", "type", "3").setSource("terms", new String[] { "2", "4" }), - client().prepareIndex("lookup", "type", "4").setSource("other", "value"), - client().prepareIndex("lookup2", "type", "1") + client().prepareIndex("lookup").setId("1").setSource("terms", new String[] { "1", "3" }), + client().prepareIndex("lookup").setId("2").setSource("terms", new String[] { "2" }), + client().prepareIndex("lookup").setId("3").setSource("terms", new String[] { "2", "4" }), + client().prepareIndex("lookup").setId("4").setSource("other", "value"), + client().prepareIndex("lookup2") + .setId("1") .setSource( XContentFactory.jsonBuilder() .startObject() @@ -1156,7 +1160,8 @@ public void testTermsLookupFilter() throws Exception { .endArray() .endObject() ), - client().prepareIndex("lookup2", "type", "2") + client().prepareIndex("lookup2") + .setId("2") .setSource( XContentFactory.jsonBuilder() .startObject() @@ -1167,7 +1172,8 @@ public void testTermsLookupFilter() throws Exception { .endArray() .endObject() ), - client().prepareIndex("lookup2", "type", "3") + client().prepareIndex("lookup2") + .setId("3") .setSource( XContentFactory.jsonBuilder() .startObject() @@ -1181,11 +1187,11 @@ public void testTermsLookupFilter() throws Exception { .endArray() .endObject() ), - client().prepareIndex("lookup3", "type", "1").setSource("terms", new String[] { "1", "3" }), - client().prepareIndex("test", "type", "1").setSource("term", "1"), - client().prepareIndex("test", "type", "2").setSource("term", "2"), - 
client().prepareIndex("test", "type", "3").setSource("term", "3"), - client().prepareIndex("test", "type", "4").setSource("term", "4") + client().prepareIndex("lookup3").setId("1").setSource("terms", new String[] { "1", "3" }), + client().prepareIndex("test").setId("1").setSource("term", "1"), + client().prepareIndex("test").setId("2").setSource("term", "2"), + client().prepareIndex("test").setId("3").setSource("term", "3"), + client().prepareIndex("test").setId("4").setSource("term", "4") ); SearchResponse searchResponse = client().prepareSearch("test") @@ -1264,9 +1270,9 @@ public void testTermsLookupFilter() throws Exception { public void testBasicQueryById() throws Exception { assertAcked(prepareCreate("test")); - client().prepareIndex("test", "_doc", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "_doc", "2").setSource("field1", "value2").get(); - client().prepareIndex("test", "_doc", "3").setSource("field1", "value3").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("2").setSource("field1", "value2").get(); + client().prepareIndex("test").setId("3").setSource("field1", "value3").get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2")).get(); @@ -1309,15 +1315,18 @@ public void testNumericTermsAndRanges() throws Exception { ) ); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource("num_byte", 1, "num_short", 1, "num_integer", 1, "num_long", 1, "num_float", 1, "num_double", 1) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource("num_byte", 2, "num_short", 2, "num_integer", 2, "num_long", 2, "num_float", 2, "num_double", 2) .get(); - client().prepareIndex("test", "type1", "17") + client().prepareIndex("test") + .setId("17") .setSource("num_byte", 17, "num_short", 17, "num_integer", 17, "num_long", 17, "num_float", 17, "num_double", 17) .get(); refresh(); @@ -1423,10 +1432,10 @@ public void testNumericRangeFilter_2826() throws Exception { ) ); - client().prepareIndex("test", "type1", "1").setSource("field1", "test1", "num_long", 1).get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "test1", "num_long", 2).get(); - client().prepareIndex("test", "type1", "3").setSource("field1", "test2", "num_long", 3).get(); - client().prepareIndex("test", "type1", "4").setSource("field1", "test2", "num_long", 4).get(); + client().prepareIndex("test").setId("1").setSource("field1", "test1", "num_long", 1).get(); + client().prepareIndex("test").setId("2").setSource("field1", "test1", "num_long", 2).get(); + client().prepareIndex("test").setId("3").setSource("field1", "test2", "num_long", 3).get(); + client().prepareIndex("test").setId("4").setSource("field1", "test2", "num_long", 4).get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -1461,10 +1470,10 @@ public void testMustNot() throws IOException, ExecutionException, InterruptedExc indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("description", "foo other anything bar"), - client().prepareIndex("test", "test", "2").setSource("description", "foo other anything"), - client().prepareIndex("test", "test", "3").setSource("description", "foo other"), - client().prepareIndex("test", "test", "4").setSource("description", "foo") + client().prepareIndex("test").setId("1").setSource("description", "foo other anything bar"), 
+ client().prepareIndex("test").setId("2").setSource("description", "foo other anything"), + client().prepareIndex("test").setId("3").setSource("description", "foo other"), + client().prepareIndex("test").setId("4").setSource("description", "foo") ); SearchResponse searchResponse = client().prepareSearch("test") @@ -1485,7 +1494,7 @@ public void testIntervals() throws InterruptedException { indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("description", "it's cold outside, there's no kind of atmosphere") + client().prepareIndex("test").setId("1").setSource("description", "it's cold outside, there's no kind of atmosphere") ); String json = "{ \"intervals\" : " @@ -1509,10 +1518,10 @@ public void testSimpleSpan() throws IOException, ExecutionException, Interrupted indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("description", "foo other anything bar"), - client().prepareIndex("test", "test", "2").setSource("description", "foo other anything"), - client().prepareIndex("test", "test", "3").setSource("description", "foo other"), - client().prepareIndex("test", "test", "4").setSource("description", "foo") + client().prepareIndex("test").setId("1").setSource("description", "foo other anything bar"), + client().prepareIndex("test").setId("2").setSource("description", "foo other anything"), + client().prepareIndex("test").setId("3").setSource("description", "foo other"), + client().prepareIndex("test").setId("4").setSource("description", "foo") ); SearchResponse searchResponse = client().prepareSearch("test").setQuery(spanOrQuery(spanTermQuery("description", "bar"))).get(); @@ -1527,10 +1536,10 @@ public void testSimpleSpan() throws IOException, ExecutionException, Interrupted public void testSpanMultiTermQuery() throws IOException { createIndex("test"); - client().prepareIndex("test", "test", "1").setSource("description", "foo other anything bar", "count", 1).get(); - client().prepareIndex("test", "test", "2").setSource("description", "foo other anything", "count", 2).get(); - client().prepareIndex("test", "test", "3").setSource("description", "foo other", "count", 3).get(); - client().prepareIndex("test", "test", "4").setSource("description", "fop", "count", 4).get(); + client().prepareIndex("test").setId("1").setSource("description", "foo other anything bar", "count", 1).get(); + client().prepareIndex("test").setId("2").setSource("description", "foo other anything", "count", 2).get(); + client().prepareIndex("test").setId("3").setSource("description", "foo other", "count", 3).get(); + client().prepareIndex("test").setId("4").setSource("description", "fop", "count", 4).get(); refresh(); SearchResponse response = client().prepareSearch("test") @@ -1560,8 +1569,8 @@ public void testSpanMultiTermQuery() throws IOException { public void testSpanNot() throws IOException, ExecutionException, InterruptedException { createIndex("test"); - client().prepareIndex("test", "test", "1").setSource("description", "the quick brown fox jumped over the lazy dog").get(); - client().prepareIndex("test", "test", "2").setSource("description", "the quick black fox leaped over the sleeping dog").get(); + client().prepareIndex("test").setId("1").setSource("description", "the quick brown fox jumped over the lazy dog").get(); + client().prepareIndex("test").setId("2").setSource("description", "the quick black fox leaped over the sleeping dog").get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -1628,19 +1637,23 @@ public void 
testSimpleDFSQuery() throws IOException { ) ); - client().prepareIndex("test", "_doc", "1") + client().prepareIndex("test") + .setId("1") .setRouting("Y") .setSource("online", false, "bs", "Y", "ts", System.currentTimeMillis() - 100, "type", "s") .get(); - client().prepareIndex("test", "_doc", "2") + client().prepareIndex("test") + .setId("2") .setRouting("X") .setSource("online", true, "bs", "X", "ts", System.currentTimeMillis() - 10000000, "type", "s") .get(); - client().prepareIndex("test", "_doc", "3") + client().prepareIndex("test") + .setId("3") .setRouting(randomAlphaOfLength(2)) .setSource("online", false, "ts", System.currentTimeMillis() - 100, "type", "bs") .get(); - client().prepareIndex("test", "_doc", "4") + client().prepareIndex("test") + .setId("4") .setRouting(randomAlphaOfLength(2)) .setSource("online", true, "ts", System.currentTimeMillis() - 123123, "type", "bs") .get(); @@ -1668,7 +1681,7 @@ public void testSimpleDFSQuery() throws IOException { } public void testMultiFieldQueryString() { - client().prepareIndex("test", "s", "1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); logger.info("regular"); assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("value1").field("field1").field("field2")).get(), 1); @@ -1691,7 +1704,7 @@ public void testMultiFieldQueryString() { public void testMultiMatchLenientIssue3797() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", 123, "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", 123, "field2", "value2").get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -1711,10 +1724,10 @@ public void testMultiMatchLenientIssue3797() { public void testMinScore() throws ExecutionException, InterruptedException { createIndex("test"); - client().prepareIndex("test", "test", "1").setSource("score", 1.5).get(); - client().prepareIndex("test", "test", "2").setSource("score", 1.0).get(); - client().prepareIndex("test", "test", "3").setSource("score", 2.0).get(); - client().prepareIndex("test", "test", "4").setSource("score", 0.5).get(); + client().prepareIndex("test").setId("1").setSource("score", 1.5).get(); + client().prepareIndex("test").setId("2").setSource("score", 1.0).get(); + client().prepareIndex("test").setId("3").setSource("score", 2.0).get(); + client().prepareIndex("test").setId("4").setSource("score", 0.5).get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -1728,8 +1741,8 @@ public void testMinScore() throws ExecutionException, InterruptedException { public void testQueryStringWithSlopAndFields() { assertAcked(prepareCreate("test")); - client().prepareIndex("test", "_doc", "1").setSource("desc", "one two three", "type", "customer").get(); - client().prepareIndex("test", "_doc", "2").setSource("desc", "one two three", "type", "product").get(); + client().prepareIndex("test").setId("1").setSource("desc", "one two three", "type", "customer").get(); + client().prepareIndex("test").setId("2").setSource("desc", "one two three", "type", "product").get(); refresh(); { SearchResponse searchResponse = client().prepareSearch("test") @@ -1769,17 +1782,15 @@ public void testQueryStringWithSlopAndFields() { public void testDateProvidedAsNumber() throws InterruptedException { createIndex("test"); - assertAcked( - 
client().admin().indices().preparePutMapping("test").setType("type").setSource("field", "type=date,format=epoch_millis").get() - ); + assertAcked(client().admin().indices().preparePutMapping("test").setSource("field", "type=date,format=epoch_millis").get()); indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("field", 1000000000001L), - client().prepareIndex("test", "type", "2").setSource("field", 1000000000000L), - client().prepareIndex("test", "type", "3").setSource("field", 999999999999L), - client().prepareIndex("test", "type", "4").setSource("field", 1000000000002L), - client().prepareIndex("test", "type", "5").setSource("field", 1000000000003L), - client().prepareIndex("test", "type", "6").setSource("field", 999999999999L) + client().prepareIndex("test").setId("1").setSource("field", 1000000000001L), + client().prepareIndex("test").setId("2").setSource("field", 1000000000000L), + client().prepareIndex("test").setId("3").setSource("field", 999999999999L), + client().prepareIndex("test").setId("4").setSource("field", 1000000000002L), + client().prepareIndex("test").setId("5").setSource("field", 1000000000003L), + client().prepareIndex("test").setId("6").setSource("field", 999999999999L) ); assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").gte(1000000000000L)).get(), 4); @@ -1791,11 +1802,12 @@ public void testRangeQueryWithTimeZone() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("date", "2014-01-01", "num", 1), - client().prepareIndex("test", "type1", "2").setSource("date", "2013-12-31T23:00:00", "num", 2), - client().prepareIndex("test", "type1", "3").setSource("date", "2014-01-01T01:00:00", "num", 3), + client().prepareIndex("test").setId("1").setSource("date", "2014-01-01", "num", 1), + client().prepareIndex("test").setId("2").setSource("date", "2013-12-31T23:00:00", "num", 2), + client().prepareIndex("test").setId("3").setSource("date", "2014-01-01T01:00:00", "num", 3), // Now in UTC+1 - client().prepareIndex("test", "type1", "4") + client().prepareIndex("test") + .setId("4") .setSource("date", Instant.now().atZone(ZoneOffset.ofHours(1)).toInstant().toEpochMilli(), "num", 4) ); @@ -1892,8 +1904,8 @@ public void testRangeQueryWithLocaleMapping() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("date_field", "Mi, 06 Dez 2000 02:55:00 -0800"), - client().prepareIndex("test", "type1", "2").setSource("date_field", "Do, 07 Dez 2000 02:55:00 -0800") + client().prepareIndex("test").setId("1").setSource("date_field", "Mi, 06 Dez 2000 02:55:00 -0800"), + client().prepareIndex("test").setId("2").setSource("date_field", "Do, 07 Dez 2000 02:55:00 -0800") ); SearchResponse searchResponse = client().prepareSearch("test") @@ -1909,7 +1921,7 @@ public void testRangeQueryWithLocaleMapping() throws Exception { public void testSearchEmptyDoc() { assertAcked(prepareCreate("test").setSettings("{\"index.analysis.analyzer.default.type\":\"keyword\"}", XContentType.JSON)); - client().prepareIndex("test", "type1", "1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).get(); refresh(); assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); @@ -1919,8 +1931,8 @@ public void testMatchPhrasePrefixQuery() throws ExecutionException, InterruptedE createIndex("test1"); indexRandom( true, - client().prepareIndex("test1", "type1", "1").setSource("field", "Johnnie Walker Black 
Label"), - client().prepareIndex("test1", "type1", "2").setSource("field", "trying out OpenSearch") + client().prepareIndex("test1").setId("1").setSource("field", "Johnnie Walker Black Label"), + client().prepareIndex("test1").setId("2").setSource("field", "trying out OpenSearch") ); SearchResponse searchResponse = client().prepareSearch() @@ -1938,7 +1950,7 @@ public void testMatchPhrasePrefixQuery() throws ExecutionException, InterruptedE public void testQueryStringParserCache() throws Exception { createIndex("test"); - indexRandom(true, false, client().prepareIndex("test", "type", "1").setSource("nameTokens", "xyz")); + indexRandom(true, false, client().prepareIndex("test").setId("1").setSource("nameTokens", "xyz")); SearchResponse response = client().prepareSearch("test") .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) @@ -1964,7 +1976,8 @@ public void testQueryStringParserCache() throws Exception { public void testRangeQueryRangeFields_24744() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", "int_range", "type=integer_range")); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().startObject("int_range").field("gte", 10).field("lte", 20).endObject().endObject()) .get(); refresh(); @@ -1977,7 +1990,7 @@ public void testRangeQueryRangeFields_24744() throws Exception { public void testRangeQueryTypeField_31476() throws Exception { assertAcked(prepareCreate("test").addMapping("foo", "field", "type=keyword")); - client().prepareIndex("test", "foo", "1").setSource("field", "value").get(); + client().prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); RangeQueryBuilder range = new RangeQueryBuilder("_type").from("ape").to("zebra"); @@ -2068,10 +2081,7 @@ public void testFieldAliasesForMetaFields() throws Exception { .endObject(); assertAcked(prepareCreate("test").addMapping("type", mapping)); - IndexRequestBuilder indexRequest = client().prepareIndex("test", "type") - .setId("1") - .setRouting("custom") - .setSource("field", "value"); + IndexRequestBuilder indexRequest = client().prepareIndex("test").setId("1").setRouting("custom").setSource("field", "value"); indexRandom(true, false, indexRequest); client().admin() .cluster() @@ -2114,7 +2124,7 @@ public void testWildcardQueryNormalizationOnKeywordField() { .build() ).addMapping("_doc", "field1", "type=keyword,normalizer=lowercase_normalizer") ); - client().prepareIndex("test", "_doc", "1").setSource("field1", "Bbb Aaa").get(); + client().prepareIndex("test").setId("1").setSource("field1", "Bbb Aaa").get(); refresh(); { @@ -2141,7 +2151,7 @@ public void testWildcardQueryNormalizationOnTextField() { .build() ).addMapping("_doc", "field1", "type=text,analyzer=lowercase_analyzer") ); - client().prepareIndex("test", "_doc", "1").setSource("field1", "Bbb Aaa").get(); + client().prepareIndex("test").setId("1").setSource("field1", "Bbb Aaa").get(); refresh(); { @@ -2169,7 +2179,7 @@ public void testWildcardQueryNormalizationKeywordSpecialCharacters() { .build() ).addMapping("_doc", "field", "type=keyword,normalizer=no_wildcard") ); - client().prepareIndex("test", "_doc", "1").setSource("field", "label-1").get(); + client().prepareIndex("test").setId("1").setSource("field", "label-1").get(); refresh(); WildcardQueryBuilder wildCardQuery = wildcardQuery("field", "la*"); @@ -2223,7 +2233,7 @@ public Map> getTokenizers() { */ public void testIssueFuzzyInsideSpanMulti() { createIndex("test"); - client().prepareIndex("test", 
"_doc", "1").setSource("field", "foobarbaz").get(); + client().prepareIndex("test").setId("1").setSource("field", "foobarbaz").get(); ensureGreen(); refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java index d23ddabedd348..6bd4eec37407f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java @@ -124,12 +124,12 @@ public void testSimpleQueryString() throws ExecutionException, InterruptedExcept indexRandom( true, false, - client().prepareIndex("test", "type1", "1").setSource("body", "foo"), - client().prepareIndex("test", "type1", "2").setSource("body", "bar"), - client().prepareIndex("test", "type1", "3").setSource("body", "foo bar"), - client().prepareIndex("test", "type1", "4").setSource("body", "quux baz eggplant"), - client().prepareIndex("test", "type1", "5").setSource("body", "quux baz spaghetti"), - client().prepareIndex("test", "type1", "6").setSource("otherbody", "spaghetti") + client().prepareIndex("test").setId("1").setSource("body", "foo"), + client().prepareIndex("test").setId("2").setSource("body", "bar"), + client().prepareIndex("test").setId("3").setSource("body", "foo bar"), + client().prepareIndex("test").setId("4").setSource("body", "quux baz eggplant"), + client().prepareIndex("test").setId("5").setSource("body", "quux baz spaghetti"), + client().prepareIndex("test").setId("6").setSource("otherbody", "spaghetti") ); SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar")).get(); @@ -175,10 +175,10 @@ public void testSimpleQueryStringMinimumShouldMatch() throws Exception { indexRandom( true, false, - client().prepareIndex("test", "type1", "1").setSource("body", "foo"), - client().prepareIndex("test", "type1", "2").setSource("body", "bar"), - client().prepareIndex("test", "type1", "3").setSource("body", "foo bar"), - client().prepareIndex("test", "type1", "4").setSource("body", "foo baz bar") + client().prepareIndex("test").setId("1").setSource("body", "foo"), + client().prepareIndex("test").setId("2").setSource("body", "bar"), + client().prepareIndex("test").setId("3").setSource("body", "foo bar"), + client().prepareIndex("test").setId("4").setSource("body", "foo baz bar") ); logger.info("--> query 1"); @@ -211,10 +211,10 @@ public void testSimpleQueryStringMinimumShouldMatch() throws Exception { indexRandom( true, false, - client().prepareIndex("test", "type1", "5").setSource("body2", "foo", "other", "foo"), - client().prepareIndex("test", "type1", "6").setSource("body2", "bar", "other", "foo"), - client().prepareIndex("test", "type1", "7").setSource("body2", "foo bar", "other", "foo"), - client().prepareIndex("test", "type1", "8").setSource("body2", "foo baz bar", "other", "foo") + client().prepareIndex("test").setId("5").setSource("body2", "foo", "other", "foo"), + client().prepareIndex("test").setId("6").setSource("body2", "bar", "other", "foo"), + client().prepareIndex("test").setId("7").setSource("body2", "foo bar", "other", "foo"), + client().prepareIndex("test").setId("8").setSource("body2", "foo baz bar", "other", "foo") ); logger.info("--> query 5"); @@ -257,7 +257,7 @@ public void testNestedFieldSimpleQueryString() throws IOException { .endObject() ) ); - client().prepareIndex("test", "type1", "1").setSource("body", "foo bar 
baz").get(); + client().prepareIndex("test").setId("1").setSource("body", "foo bar baz").get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body")).get(); @@ -281,12 +281,12 @@ public void testSimpleQueryStringFlags() throws ExecutionException, InterruptedE createIndex("test"); indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("body", "foo"), - client().prepareIndex("test", "type1", "2").setSource("body", "bar"), - client().prepareIndex("test", "type1", "3").setSource("body", "foo bar"), - client().prepareIndex("test", "type1", "4").setSource("body", "quux baz eggplant"), - client().prepareIndex("test", "type1", "5").setSource("body", "quux baz spaghetti"), - client().prepareIndex("test", "type1", "6").setSource("otherbody", "spaghetti") + client().prepareIndex("test").setId("1").setSource("body", "foo"), + client().prepareIndex("test").setId("2").setSource("body", "bar"), + client().prepareIndex("test").setId("3").setSource("body", "foo bar"), + client().prepareIndex("test").setId("4").setSource("body", "quux baz eggplant"), + client().prepareIndex("test").setId("5").setSource("body", "quux baz spaghetti"), + client().prepareIndex("test").setId("6").setSource("otherbody", "spaghetti") ); SearchResponse searchResponse = client().prepareSearch() @@ -339,8 +339,8 @@ public void testSimpleQueryStringLenient() throws ExecutionException, Interrupte createIndex("test1", "test2"); indexRandom( true, - client().prepareIndex("test1", "type1", "1").setSource("field", "foo"), - client().prepareIndex("test2", "type1", "10").setSource("field", 5) + client().prepareIndex("test1").setId("1").setSource("field", "foo"), + client().prepareIndex("test2").setId("10").setSource("field", 5) ); refresh(); @@ -362,8 +362,8 @@ public void testSimpleQueryStringLenient() throws ExecutionException, Interrupte public void testLenientFlagBeingTooLenient() throws Exception { indexRandom( true, - client().prepareIndex("test", "_doc", "1").setSource("num", 1, "body", "foo bar baz"), - client().prepareIndex("test", "_doc", "2").setSource("num", 2, "body", "eggplant spaghetti lasagna") + client().prepareIndex("test").setId("1").setSource("num", 1, "body", "foo bar baz"), + client().prepareIndex("test").setId("2").setSource("num", 2, "body", "eggplant spaghetti lasagna") ); BoolQueryBuilder q = boolQuery().should(simpleQueryStringQuery("bar").field("num").field("body").lenient(true)); @@ -395,7 +395,7 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In .prepareCreate("test1") .addMapping("type1", mapping, XContentType.JSON); mappingRequest.get(); - indexRandom(true, client().prepareIndex("test1", "type1", "1").setSource("location", "Köln")); + indexRandom(true, client().prepareIndex("test1").setId("1").setSource("location", "Köln")); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("Köln*").field("location")).get(); @@ -405,8 +405,8 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In } public void testSimpleQueryStringUsesFieldAnalyzer() throws Exception { - client().prepareIndex("test", "type1", "1").setSource("foo", 123, "bar", "abc").get(); - client().prepareIndex("test", "type1", "2").setSource("foo", 234, "bar", "bcd").get(); + client().prepareIndex("test").setId("1").setSource("foo", 123, "bar", "abc").get(); + client().prepareIndex("test").setId("2").setSource("foo", 234, "bar", "bcd").get(); refresh(); 
@@ -416,8 +416,8 @@ public void testSimpleQueryStringUsesFieldAnalyzer() throws Exception { } public void testSimpleQueryStringOnIndexMetaField() throws Exception { - client().prepareIndex("test", "type1", "1").setSource("foo", 123, "bar", "abc").get(); - client().prepareIndex("test", "type1", "2").setSource("foo", 234, "bar", "bcd").get(); + client().prepareIndex("test").setId("1").setSource("foo", 123, "bar", "abc").get(); + client().prepareIndex("test").setId("2").setSource("foo", 234, "bar", "bcd").get(); refresh(); @@ -447,7 +447,7 @@ public void testEmptySimpleQueryStringWithAnalysis() throws Exception { .prepareCreate("test1") .addMapping("type1", mapping, XContentType.JSON); mappingRequest.get(); - indexRandom(true, client().prepareIndex("test1", "type1", "1").setSource("body", "Some Text")); + indexRandom(true, client().prepareIndex("test1").setId("1").setSource("body", "Some Text")); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("the*").field("body")).get(); @@ -461,9 +461,9 @@ public void testBasicAllQuery() throws Exception { ensureGreen("test"); List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f1", "foo bar baz")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f2", "Bar")); - reqs.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "foo bar baz")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f1", "foo bar baz")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f2", "Bar")); + reqs.add(client().prepareIndex("test").setId("3").setSource("f3", "foo bar baz")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); @@ -485,8 +485,8 @@ public void testWithDate() throws Exception { ensureGreen("test"); List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f1", "foo", "f_date", "2015/09/02")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f1", "bar", "f_date", "2015/09/01")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f1", "foo", "f_date", "2015/09/02")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")).get(); @@ -513,10 +513,10 @@ public void testWithLotsOfTypes() throws Exception { List reqs = new ArrayList<>(); reqs.add( - client().prepareIndex("test", "_doc", "1").setSource("f1", "foo", "f_date", "2015/09/02", "f_float", "1.7", "f_ip", "127.0.0.1") + client().prepareIndex("test").setId("1").setSource("f1", "foo", "f_date", "2015/09/02", "f_float", "1.7", "f_ip", "127.0.0.1") ); reqs.add( - client().prepareIndex("test", "_doc", "2").setSource("f1", "bar", "f_date", "2015/09/01", "f_float", "1.8", "f_ip", "127.0.0.2") + client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01", "f_float", "1.8", "f_ip", "127.0.0.2") ); indexRandom(true, false, reqs); @@ -544,7 +544,7 @@ public void testDocWithAllTypes() throws Exception { List reqs = new ArrayList<>(); String docBody = copyToStringFromClasspath("/org/opensearch/search/query/all-example-document.json"); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource(docBody, XContentType.JSON)); + reqs.add(client().prepareIndex("test").setId("1").setSource(docBody, XContentType.JSON)); indexRandom(true, false, 
reqs); SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); @@ -588,9 +588,9 @@ public void testKeywordWithWhitespace() throws Exception { ensureGreen("test"); List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f2", "Foo Bar")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f1", "bar")); - reqs.add(client().prepareIndex("test", "_doc", "3").setSource("f1", "foo bar")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f2", "Foo Bar")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar")); + reqs.add(client().prepareIndex("test").setId("3").setSource("f1", "foo bar")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); @@ -632,7 +632,7 @@ public void testLimitOnExpandedFields() throws Exception { ).addMapping("type1", builder) ); - client().prepareIndex("toomanyfields", "type1", "1").setSource("field1", "foo bar baz").get(); + client().prepareIndex("toomanyfields").setId("1").setSource("field1", "foo bar baz").get(); refresh(); doAssertLimitExceededException("*", CLUSTER_MAX_CLAUSE_COUNT + 1); @@ -657,9 +657,9 @@ public void testFieldAlias() throws Exception { ensureGreen("test"); List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); - indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); - indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_alias")).get(); @@ -675,9 +675,9 @@ public void testFieldAliasWithWildcardField() throws Exception { ensureGreen("test"); List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); - indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); - indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_*")).get(); @@ -693,7 +693,7 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { ensureGreen("test"); List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); indexRandom(true, false, indexRequests); // The wildcard field matches aliases for both a text and 
boolean field. diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java index 13dc97eb3daf9..3b120dcab22f2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -133,11 +133,13 @@ public void testCustomScriptBinaryField() throws Exception { .addMapping("my-type", createMappingSource("binary")) .setSettings(indexSettings()) ); - client().prepareIndex("my-index", "my-type", "1") + client().prepareIndex("my-index") + .setId("1") .setSource(jsonBuilder().startObject().field("binaryData", Base64.getEncoder().encodeToString(randomBytesDoc1)).endObject()) .get(); flush(); - client().prepareIndex("my-index", "my-type", "2") + client().prepareIndex("my-index") + .setId("2") .setSource(jsonBuilder().startObject().field("binaryData", Base64.getEncoder().encodeToString(randomBytesDoc2)).endObject()) .get(); flush(); @@ -181,15 +183,18 @@ private XContentBuilder createMappingSource(String fieldType) throws IOException public void testCustomScriptBoost() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).endObject()) .get(); flush(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).endObject()) .get(); flush(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).endObject()) .get(); refresh(); @@ -244,7 +249,7 @@ public void testDisallowExpensiveQueries() { assertAcked(prepareCreate("test-index").addMapping("_doc", "num1", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { - client().prepareIndex("test-index", "_doc").setId("" + i).setSource("num1", i).get(); + client().prepareIndex("test-index").setId("" + i).setSource("num1", i).get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java index 33899a1fb152c..be55193da30cc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java @@ -155,7 +155,7 @@ private TestContext create(SearchType... 
searchTypes) throws Exception { } for (int i = 1; i <= numDocs; i++) { - IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index", "type", String.valueOf(i)); + IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index").setId(String.valueOf(i)); if (missingDocs.contains(i)) { indexRequestBuilder.setSource("x", "y"); } else { @@ -230,7 +230,7 @@ private int createIndex(boolean singleShard) throws Exception { IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; ++i) { - builders[i] = client().prepareIndex("test", "type", Integer.toString(i)).setSource("foo", random().nextBoolean()); + builders[i] = client().prepareIndex("test").setId(Integer.toString(i)).setSource("foo", random().nextBoolean()); } indexRandom(true, builders); return numDocs; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java index a56c014a08ba5..5c56671384868 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java @@ -106,7 +106,8 @@ public void testSimpleScrollQueryThenFetch() throws Exception { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", i).endObject()) .get(); } @@ -161,7 +162,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } else if (i > 60) { routing = "2"; } - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", i).setRouting(routing).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", i).setRouting(routing).get(); } client().admin().indices().prepareRefresh().get(); @@ -220,7 +221,8 @@ public void testScrollAndUpdateIndex() throws Exception { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); for (int i = 0; i < 500; i++) { - client().prepareIndex("test", "tweet", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("user", "foobar") @@ -262,7 +264,7 @@ public void testScrollAndUpdateIndex() throws Exception { for (SearchHit searchHit : searchResponse.getHits().getHits()) { Map map = searchHit.getSourceAsMap(); map.put("message", "update"); - client().prepareIndex("test", "tweet", searchHit.getId()).setSource(map).get(); + client().prepareIndex("test").setId(searchHit.getId()).setSource(map).get(); } searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); } while (searchResponse.getHits().getHits().length > 0); @@ -297,7 +299,8 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", i).endObject()) .get(); } @@ -416,7 +419,8 @@ public void 
testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", i).endObject()) .get(); } @@ -490,7 +494,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { * Tests that we use an optimization shrinking the batch to the size of the shard. Thus the Integer.MAX_VALUE window doesn't OOM us. */ public void testDeepScrollingDoesNotBlowUp() throws Exception { - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).execute().get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).execute().get(); /* * Disable the max result window setting for this test because it'll reject the search's unreasonable batch size. We want * unreasonable batch sizes to just OOM. @@ -521,7 +525,7 @@ public void testDeepScrollingDoesNotBlowUp() throws Exception { } public void testThatNonExistingScrollIdReturnsCorrectException() throws Exception { - client().prepareIndex("index", "type", "1").setSource("field", "value").execute().get(); + client().prepareIndex("index").setId("1").setSource("field", "value").execute().get(); refresh(); SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); @@ -539,7 +543,7 @@ public void testStringSortMissingAscTerminates() throws Exception { Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ).addMapping("test", "no_field", "type=keyword", "some_field", "type=keyword") ); - client().prepareIndex("test", "test", "1").setSource("some_field", "test").get(); + client().prepareIndex("test").setId("1").setSource("some_field", "test").get(); refresh(); SearchResponse response = client().prepareSearch("test") @@ -569,7 +573,7 @@ public void testStringSortMissingAscTerminates() throws Exception { public void testCloseAndReopenOrDeleteWithActiveScroll() { createIndex("test"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", i).get(); } refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -660,7 +664,8 @@ public void testScrollInvalidDefaultKeepAlive() throws IOException { public void testInvalidScrollKeepAlive() throws IOException { createIndex("test"); for (int i = 0; i < 2; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", i).endObject()) .get(); } @@ -715,9 +720,9 @@ public void testScrollRewrittenToMatchNoDocs() { .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards)) .addMapping("_doc", "created_date", "type=date,format=yyyy-MM-dd") ); - client().prepareIndex("test", "_doc").setId("1").setSource("created_date", "2020-01-01").get(); - client().prepareIndex("test", "_doc").setId("2").setSource("created_date", "2020-01-02").get(); - client().prepareIndex("test", "_doc").setId("3").setSource("created_date", "2020-01-03").get(); + client().prepareIndex("test").setId("1").setSource("created_date", 
"2020-01-01").get(); + client().prepareIndex("test").setId("2").setSource("created_date", "2020-01-02").get(); + client().prepareIndex("test").setId("3").setSource("created_date", "2020-01-03").get(); client().admin().indices().prepareRefresh("test").get(); SearchResponse resp = null; try { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java index b5609d9e51016..a56f8667fab48 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java @@ -77,7 +77,7 @@ public void testScanScrollWithShardExceptions() throws Exception { List writes = new ArrayList<>(); for (int i = 0; i < 100; i++) { - writes.add(client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", i).endObject())); + writes.add(client().prepareIndex("test").setSource(jsonBuilder().startObject().field("field", i).endObject())); } indexRandom(false, writes); refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java index 8270496943cdb..b88e56b4f675d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java @@ -67,7 +67,7 @@ public void testsShouldFail() throws Exception { client().admin().indices().prepareCreate("test").addMapping("type1", "field1", "type=long", "field2", "type=keyword").get() ); ensureGreen(); - indexRandom(true, client().prepareIndex("test", "type1", "0").setSource("field1", 0, "field2", "toto")); + indexRandom(true, client().prepareIndex("test").setId("0").setSource("field1", 0, "field2", "toto")); { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, @@ -163,8 +163,8 @@ public void testWithNullStrings() throws InterruptedException { ensureGreen(); indexRandom( true, - client().prepareIndex("test", "type1", "0").setSource("field1", 0), - client().prepareIndex("test", "type1", "1").setSource("field1", 100, "field2", "toto") + client().prepareIndex("test").setId("0").setSource("field1", 0), + client().prepareIndex("test").setId("1").setSource("field1", 100, "field2", "toto") ); SearchResponse searchResponse = client().prepareSearch("test") .addSort("field1", SortOrder.ASC) @@ -263,7 +263,7 @@ private void assertSearchFromWithSortValues(String indexName, String typeName, L builder.field("field" + Integer.toString(j), documents.get(i).get(j)); } builder.endObject(); - requests.add(client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i)).setSource(builder)); + requests.add(client().prepareIndex(INDEX_NAME).setId(Integer.toString(i)).setSource(builder)); } indexRandom(true, requests); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java index 661c5bf563e9f..0652b38228ec5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java @@ -43,6 +43,7 @@ import 
org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.index.IndexSettings; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.rest.RestStatus; @@ -87,12 +88,12 @@ public void testSearchRandomPreference() throws InterruptedException, ExecutionE createIndex("test"); indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("field", "value"), - client().prepareIndex("test", "type", "2").setSource("field", "value"), - client().prepareIndex("test", "type", "3").setSource("field", "value"), - client().prepareIndex("test", "type", "4").setSource("field", "value"), - client().prepareIndex("test", "type", "5").setSource("field", "value"), - client().prepareIndex("test", "type", "6").setSource("field", "value") + client().prepareIndex("test").setId("1").setSource("field", "value"), + client().prepareIndex("test").setId("2").setSource("field", "value"), + client().prepareIndex("test").setId("3").setSource("field", "value"), + client().prepareIndex("test").setId("4").setSource("field", "value"), + client().prepareIndex("test").setId("5").setSource("field", "value"), + client().prepareIndex("test").setId("6").setSource("field", "value") ); int iters = scaledRandomIntBetween(10, 20); @@ -118,11 +119,10 @@ public void testSimpleIp() throws Exception { client().admin() .indices() .preparePutMapping("test") - .setType("type1") .setSource( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("from") .field("type", "ip") @@ -136,10 +136,7 @@ public void testSimpleIp() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "1") - .setSource("from", "192.168.0.5", "to", "192.168.0.10") - .setRefreshPolicy(IMMEDIATE) - .get(); + client().prepareIndex("test").setId("1").setSource("from", "192.168.0.5", "to", "192.168.0.10").setRefreshPolicy(IMMEDIATE).get(); SearchResponse search = client().prepareSearch() .setQuery(boolQuery().must(rangeQuery("from").lte("192.168.0.7")).must(rangeQuery("to").gte("192.168.0.7"))) @@ -154,11 +151,10 @@ public void testIpCidr() throws Exception { client().admin() .indices() .preparePutMapping("test") - .setType("type1") .setSource( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("ip") .field("type", "ip") @@ -170,11 +166,11 @@ public void testIpCidr() throws Exception { .get(); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("ip", "192.168.0.1").get(); - client().prepareIndex("test", "type1", "2").setSource("ip", "192.168.0.2").get(); - client().prepareIndex("test", "type1", "3").setSource("ip", "192.168.0.3").get(); - client().prepareIndex("test", "type1", "4").setSource("ip", "192.168.1.4").get(); - client().prepareIndex("test", "type1", "5").setSource("ip", "2001:db8::ff00:42:8329").get(); + client().prepareIndex("test").setId("1").setSource("ip", "192.168.0.1").get(); + client().prepareIndex("test").setId("2").setSource("ip", "192.168.0.2").get(); + client().prepareIndex("test").setId("3").setSource("ip", "192.168.0.3").get(); + client().prepareIndex("test").setId("4").setSource("ip", "192.168.1.4").get(); + client().prepareIndex("test").setId("5").setSource("ip", "2001:db8::ff00:42:8329").get(); refresh(); SearchResponse 
search = client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1"))).get(); @@ -217,7 +213,7 @@ public void testIpCidr() throws Exception { public void testSimpleId() { createIndex("test"); - client().prepareIndex("test", "type", "XXX1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("XXX1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); // id is not indexed, but lets see that we automatically convert to SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.termQuery("_id", "XXX1")).get(); assertHitCount(searchResponse, 1L); @@ -228,8 +224,8 @@ public void testSimpleId() { public void testSimpleDateRange() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field", "2010-01-05T02:00").get(); - client().prepareIndex("test", "type1", "2").setSource("field", "2010-01-06T02:00").get(); + client().prepareIndex("test").setId("1").setSource("field", "2010-01-05T02:00").get(); + client().prepareIndex("test").setId("2").setSource("field", "2010-01-06T02:00").get(); ensureGreen(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -270,7 +266,7 @@ public void testSimpleTerminateAfterCount() throws Exception { for (int i = 1; i <= max; i++) { String id = String.valueOf(i); - docbuilders.add(client().prepareIndex("test", "type1", id).setSource("field", i)); + docbuilders.add(client().prepareIndex("test").setId(id).setSource("field", i)); } indexRandom(true, docbuilders); @@ -299,14 +295,14 @@ public void testSimpleTerminateAfterCount() throws Exception { public void testSimpleIndexSortEarlyTerminate() throws Exception { prepareCreate("test").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).put("index.sort.field", "rank") - ).addMapping("type1", "rank", "type=integer").get(); + ).addMapping(MapperService.SINGLE_MAPPING_NAME, "rank", "type=integer").get(); ensureGreen(); int max = randomIntBetween(3, 29); List docbuilders = new ArrayList<>(max); for (int i = max - 1; i >= 0; i--) { String id = String.valueOf(i); - docbuilders.add(client().prepareIndex("test", "type1", id).setSource("rank", i)); + docbuilders.add(client().prepareIndex("test").setId(id).setSource("rank", i)); } indexRandom(true, docbuilders); @@ -330,7 +326,7 @@ public void testSimpleIndexSortEarlyTerminate() throws Exception { public void testInsaneFromAndSize() throws Exception { createIndex("idx"); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertWindowFails(client().prepareSearch("idx").setFrom(Integer.MAX_VALUE)); assertWindowFails(client().prepareSearch("idx").setSize(Integer.MAX_VALUE)); @@ -338,7 +334,7 @@ public void testInsaneFromAndSize() throws Exception { public void testTooLargeFromAndSize() throws Exception { createIndex("idx"); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertWindowFails(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY))); assertWindowFails(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1)); @@ -351,7 +347,7 @@ public void testTooLargeFromAndSize() throws Exception { public void 
testLargeFromAndSizeSucceeds() throws Exception { createIndex("idx"); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) - 10).get(), 1); assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); @@ -369,7 +365,7 @@ public void testTooLargeFromAndSizeOkBySetting() throws Exception { Settings.builder() .put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 2) ).get(); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1).get(), 1); @@ -397,7 +393,7 @@ public void testTooLargeFromAndSizeOkByDynamicSetting() throws Exception { ) .get() ); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1).get(), 1); @@ -412,7 +408,7 @@ public void testTooLargeFromAndSizeOkByDynamicSetting() throws Exception { public void testTooLargeFromAndSizeBackwardsCompatibilityRecommendation() throws Exception { prepareCreate("idx").setSettings(Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), Integer.MAX_VALUE)).get(); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10).get(), 1); assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10).get(), 1); @@ -427,7 +423,7 @@ public void testTooLargeFromAndSizeBackwardsCompatibilityRecommendation() throws public void testTooLargeRescoreWindow() throws Exception { createIndex("idx"); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertRescoreWindowFails(Integer.MAX_VALUE); assertRescoreWindowFails(IndexSettings.MAX_RESCORE_WINDOW_SETTING.get(Settings.EMPTY) + 1); @@ -437,7 +433,7 @@ public void testTooLargeRescoreOkBySetting() throws Exception { int defaultMaxWindow = IndexSettings.MAX_RESCORE_WINDOW_SETTING.get(Settings.EMPTY); prepareCreate("idx").setSettings(Settings.builder().put(IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey(), defaultMaxWindow * 2)) .get(); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount( client().prepareSearch("idx").addRescorer(new 
QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)).get(), @@ -454,7 +450,7 @@ public void testTooLargeRescoreOkByResultWindowSetting() throws Exception { defaultMaxWindow * 2 ) ).get(); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount( client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)).get(), @@ -472,7 +468,7 @@ public void testTooLargeRescoreOkByDynamicSetting() throws Exception { .setSettings(Settings.builder().put(IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey(), defaultMaxWindow * 2)) .get() ); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount( client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)).get(), @@ -493,7 +489,7 @@ public void testTooLargeRescoreOkByDynamicResultWindowSetting() throws Exception ) .get() ); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount( client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)).get(), @@ -517,7 +513,7 @@ public void testTermQueryBigInt() throws Exception { prepareCreate("idx").addMapping("type", "field", "type=keyword").get(); ensureGreen("idx"); - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setId("1") .setSource("{\"field\" : 80315953321748200608 }", XContentType.JSON) .setRefreshPolicy(RefreshPolicy.IMMEDIATE) @@ -533,7 +529,7 @@ public void testTermQueryBigInt() throws Exception { public void testTooLongRegexInRegexpQuery() throws Exception { createIndex("idx"); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); int defaultMaxRegexLength = IndexSettings.MAX_REGEX_LENGTH_SETTING.get(Settings.EMPTY); StringBuilder regexp = new StringBuilder(defaultMaxRegexLength); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java index be5506291a2c1..c4697e63cb4f7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java @@ -102,7 +102,7 @@ private void setupIndex(int numDocs, int numberOfShards) throws IOException, Exe .field("static_int", 0) .field("invalid_random_int", randomInt()) .endObject(); - requests.add(client().prepareIndex("test", "type").setSource(builder)); + requests.add(client().prepareIndex("test").setSource(builder)); } indexRandom(true, requests); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java index 643a7875c0295..92dfedeb99a23 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java @@ -142,7 +142,7 @@ public void testIssue8226() { 
assertAcked(prepareCreate("test_" + i).addAlias(new Alias("test"))); } if (i > 0) { - client().prepareIndex("test_" + i, "foo", "" + i).setSource("{\"entry\": " + i + "}", XContentType.JSON).get(); + client().prepareIndex("test_" + i).setId("" + i).setSource("{\"entry\": " + i + "}", XContentType.JSON).get(); } } refresh(); @@ -188,7 +188,7 @@ public void testIssue6614() throws ExecutionException, InterruptedException { final int numDocs = randomIntBetween(1, 23); // hour of the day for (int j = 0; j < numDocs; j++) { builders.add( - client().prepareIndex(indexId, "type") + client().prepareIndex(indexId) .setSource( "foo", "bar", @@ -312,7 +312,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut sparseBytes.put(ref, docId); } src.endObject(); - builders[i] = client().prepareIndex("test", "type", docId).setSource(src); + builders[i] = client().prepareIndex("test").setId(docId).setSource(src); } indexRandom(true, builders); { @@ -361,7 +361,7 @@ public void test3078() { ensureGreen(); for (int i = 1; i < 101; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", Integer.toString(i)).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", Integer.toString(i)).get(); } refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -373,7 +373,7 @@ public void test3078() { assertThat(searchResponse.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); // reindex and refresh - client().prepareIndex("test", "type", Integer.toString(1)).setSource("field", Integer.toString(1)).get(); + client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); refresh(); searchResponse = client().prepareSearch("test") @@ -385,7 +385,7 @@ public void test3078() { assertThat(searchResponse.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); // reindex - no refresh - client().prepareIndex("test", "type", Integer.toString(1)).setSource("field", Integer.toString(1)).get(); + client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); searchResponse = client().prepareSearch("test") .setQuery(matchAllQuery()) @@ -399,7 +399,7 @@ public void test3078() { forceMerge(); refresh(); - client().prepareIndex("test", "type", Integer.toString(1)).setSource("field", Integer.toString(1)).get(); + client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); searchResponse = client().prepareSearch("test") .setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) @@ -422,9 +422,9 @@ public void testScoreSortDirection() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", 2).get(); - client().prepareIndex("test", "type", "2").setSource("field", 1).get(); - client().prepareIndex("test", "type", "3").setSource("field", 0).get(); + client().prepareIndex("test").setId("1").setSource("field", 2).get(); + client().prepareIndex("test").setId("2").setSource("field", 1).get(); + client().prepareIndex("test").setId("3").setSource("field", 0).get(); refresh(); @@ -460,9 +460,9 @@ public void testScoreSortDirectionWithFunctionScore() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", 2).get(); - client().prepareIndex("test", "type", "2").setSource("field", 1).get(); - client().prepareIndex("test", "type", 
"3").setSource("field", 0).get(); + client().prepareIndex("test").setId("1").setSource("field", 2).get(); + client().prepareIndex("test").setId("2").setSource("field", 1).get(); + client().prepareIndex("test").setId("3").setSource("field", 0).get(); refresh(); @@ -497,9 +497,9 @@ public void testScoreSortDirectionWithFunctionScore() throws Exception { public void testIssue2986() { assertAcked(client().admin().indices().prepareCreate("test").addMapping("post", "field1", "type=keyword").get()); - client().prepareIndex("test", "post", "1").setSource("{\"field1\":\"value1\"}", XContentType.JSON).get(); - client().prepareIndex("test", "post", "2").setSource("{\"field1\":\"value2\"}", XContentType.JSON).get(); - client().prepareIndex("test", "post", "3").setSource("{\"field1\":\"value3\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"field1\":\"value1\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("2").setSource("{\"field1\":\"value2\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("3").setSource("{\"field1\":\"value3\"}", XContentType.JSON).get(); refresh(); SearchResponse result = client().prepareSearch("test") .setQuery(matchAllQuery()) @@ -521,16 +521,16 @@ public void testIssue2991() { } assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", "tag", "type=keyword").get()); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("tag", "alpha").get(); + client().prepareIndex("test").setId("1").setSource("tag", "alpha").get(); refresh(); - client().prepareIndex("test", "type", "3").setSource("tag", "gamma").get(); + client().prepareIndex("test").setId("3").setSource("tag", "gamma").get(); refresh(); - client().prepareIndex("test", "type", "4").setSource("tag", "delta").get(); + client().prepareIndex("test").setId("4").setSource("tag", "delta").get(); refresh(); - client().prepareIndex("test", "type", "2").setSource("tag", "beta").get(); + client().prepareIndex("test").setId("2").setSource("tag", "beta").get(); refresh(); SearchResponse resp = client().prepareSearch("test") @@ -596,7 +596,8 @@ public void testSimpleSorts() throws Exception { ensureGreen(); List builders = new ArrayList<>(); for (int i = 0; i < 10; i++) { - IndexRequestBuilder builder = client().prepareIndex("test", "type1", Integer.toString(i)) + IndexRequestBuilder builder = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("str_value", new String(new char[] { (char) (97 + i), (char) (97 + i) })) @@ -818,13 +819,15 @@ public void testSortMissingNumbers() throws Exception { ) ); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("i_value", -1).field("d_value", -1.1).endObject()) .get(); - client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("id", "2").endObject()).get(); + client().prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("id", "2").endObject()).get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("id", "1").field("i_value", 2).field("d_value", 2.2).endObject()) .get(); @@ -885,13 +888,15 @@ public void testSortMissingStrings() throws IOException { ) ); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") 
.setSource(jsonBuilder().startObject().field("id", "1").field("value", "a").endObject()) .get(); - client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("id", "2").endObject()).get(); + client().prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("id", "2").endObject()).get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("id", "1").field("value", "c").endObject()) .get(); @@ -957,7 +962,8 @@ public void testSortMissingStrings() throws IOException { public void testIgnoreUnmapped() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("i_value", -1).field("d_value", -1.1).endObject()) .get(); @@ -1037,7 +1043,8 @@ public void testSortMVField() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", Integer.toString(1)) + client().prepareIndex("test") + .setId(Integer.toString(1)) .setSource( jsonBuilder().startObject() .array("long_values", 1L, 5L, 10L, 8L) @@ -1050,7 +1057,8 @@ public void testSortMVField() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type1", Integer.toString(2)) + client().prepareIndex("test") + .setId(Integer.toString(2)) .setSource( jsonBuilder().startObject() .array("long_values", 11L, 15L, 20L, 7L) @@ -1063,7 +1071,8 @@ public void testSortMVField() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type1", Integer.toString(3)) + client().prepareIndex("test") + .setId(Integer.toString(3)) .setSource( jsonBuilder().startObject() .array("long_values", 2L, 1L, 3L, -4L) @@ -1351,7 +1360,8 @@ public void testSortOnRareField() throws IOException { ) ); ensureGreen(); - client().prepareIndex("test", "type1", Integer.toString(1)) + client().prepareIndex("test") + .setId(Integer.toString(1)) .setSource(jsonBuilder().startObject().array("string_values", "01", "05", "10", "08").endObject()) .get(); @@ -1367,11 +1377,13 @@ public void testSortOnRareField() throws IOException { assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(1))); assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo("10")); - client().prepareIndex("test", "type1", Integer.toString(2)) + client().prepareIndex("test") + .setId(Integer.toString(2)) .setSource(jsonBuilder().startObject().array("string_values", "11", "15", "20", "07").endObject()) .get(); for (int i = 0; i < 15; i++) { - client().prepareIndex("test", "type1", Integer.toString(300 + i)) + client().prepareIndex("test") + .setId(Integer.toString(300 + i)) .setSource(jsonBuilder().startObject().array("some_other_field", "foobar").endObject()) .get(); } @@ -1387,11 +1399,13 @@ public void testSortOnRareField() throws IOException { assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); assertThat(searchResponse.getHits().getAt(1).getSortValues()[0], equalTo("10")); - client().prepareIndex("test", "type1", Integer.toString(3)) + client().prepareIndex("test") + .setId(Integer.toString(3)) .setSource(jsonBuilder().startObject().array("string_values", "02", "01", "03", "!4").endObject()) .get(); for (int i = 0; i < 15; i++) { - client().prepareIndex("test", "type1", Integer.toString(300 + i)) + client().prepareIndex("test") + .setId(Integer.toString(300 + i)) .setSource(jsonBuilder().startObject().array("some_other_field", 
"foobar").endObject()) .get(); } @@ -1411,7 +1425,8 @@ public void testSortOnRareField() throws IOException { assertThat(searchResponse.getHits().getAt(2).getSortValues()[0], equalTo("03")); for (int i = 0; i < 15; i++) { - client().prepareIndex("test", "type1", Integer.toString(300 + i)) + client().prepareIndex("test") + .setId(Integer.toString(300 + i)) .setSource(jsonBuilder().startObject().array("some_other_field", "foobar").endObject()) .get(); refresh(); @@ -1443,7 +1458,7 @@ public void testSortMetaField() throws Exception { final int numDocs = randomIntBetween(10, 20); IndexRequestBuilder[] indexReqs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; ++i) { - indexReqs[i] = client().prepareIndex("test", "type", Integer.toString(i)).setSource(); + indexReqs[i] = client().prepareIndex("test").setId(Integer.toString(i)).setSource(); } indexRandom(true, indexReqs); @@ -1520,7 +1535,8 @@ public void testNestedSort() throws IOException, InterruptedException, Execution ); ensureGreen(); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startArray("nested") @@ -1534,7 +1550,8 @@ public void testNestedSort() throws IOException, InterruptedException, Execution .endObject() ) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .startArray("nested") @@ -1625,7 +1642,7 @@ public void testSortDuelBetweenSingleShardAndMultiShardIndex() throws Exception for (String index : new String[] { "test1", "test2" }) { List docs = new ArrayList<>(); for (int i = 0; i < 256; i++) { - docs.add(client().prepareIndex(index, "type", Integer.toString(i)).setSource(sortField, i)); + docs.add(client().prepareIndex(index).setId(Integer.toString(i)).setSource(sortField, i)); } indexRandom(true, docs); } @@ -1657,8 +1674,8 @@ public void testCustomFormat() throws Exception { assertAcked(prepareCreate("test").addMapping("type", "ip", "type=ip")); indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("ip", "192.168.1.7"), - client().prepareIndex("test", "type", "2").setSource("ip", "2001:db8::ff00:42:8329") + client().prepareIndex("test").setId("1").setSource("ip", "192.168.1.7"), + client().prepareIndex("test").setId("2").setSource("ip", "2001:db8::ff00:42:8329") ); SearchResponse response = client().prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).get(); @@ -1681,7 +1698,7 @@ public void testScriptFieldSort() throws Exception { IndexRequestBuilder[] indexReqs = new IndexRequestBuilder[numDocs]; List keywords = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { - indexReqs[i] = client().prepareIndex("test", "t").setSource("number", i, "keyword", Integer.toString(i)); + indexReqs[i] = client().prepareIndex("test").setSource("number", i, "keyword", Integer.toString(i)); keywords.add(Integer.toString(i)); } Collections.sort(keywords); @@ -1732,9 +1749,9 @@ public void testFieldAlias() throws Exception { ensureGreen("old_index", "new_index"); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); - builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 50.5)); - builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); + builders.add(client().prepareIndex("old_index").setSource("distance", 42.0)); + builders.add(client().prepareIndex("old_index").setSource("distance", 50.5)); + 
builders.add(client().prepareIndex("new_index").setSource("route_length_miles", 100.2)); indexRandom(true, true, builders); SearchResponse response = client().prepareSearch() @@ -1760,9 +1777,9 @@ public void testFieldAliasesWithMissingValues() throws Exception { ensureGreen("old_index", "new_index"); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); - builders.add(client().prepareIndex("old_index", "_doc").setSource(Collections.emptyMap())); - builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); + builders.add(client().prepareIndex("old_index").setSource("distance", 42.0)); + builders.add(client().prepareIndex("old_index").setSource(Collections.emptyMap())); + builders.add(client().prepareIndex("new_index").setSource("route_length_miles", 100.2)); indexRandom(true, true, builders); SearchResponse response = client().prepareSearch() @@ -1785,9 +1802,9 @@ public void testCastNumericType() throws Exception { ensureGreen("index_double", "index_long", "index_float"); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("index_double", "_doc").setSource("field", 12.6)); - builders.add(client().prepareIndex("index_long", "_doc").setSource("field", 12)); - builders.add(client().prepareIndex("index_float", "_doc").setSource("field", 12.1)); + builders.add(client().prepareIndex("index_double").setSource("field", 12.6)); + builders.add(client().prepareIndex("index_long").setSource("field", 12)); + builders.add(client().prepareIndex("index_float").setSource("field", 12.1)); indexRandom(true, true, builders); { @@ -1830,8 +1847,8 @@ public void testCastDate() throws Exception { ensureGreen("index_date", "index_date_nanos"); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("index_date", "_doc").setSource("field", "2024-04-11T23:47:17")); - builders.add(client().prepareIndex("index_date_nanos", "_doc").setSource("field", "2024-04-11T23:47:16.854775807Z")); + builders.add(client().prepareIndex("index_date").setSource("field", "2024-04-11T23:47:17")); + builders.add(client().prepareIndex("index_date_nanos").setSource("field", "2024-04-11T23:47:16.854775807Z")); indexRandom(true, true, builders); { @@ -1913,7 +1930,7 @@ public void testCastDate() throws Exception { { builders.clear(); - builders.add(client().prepareIndex("index_date", "_doc").setSource("field", "1905-04-11T23:47:17")); + builders.add(client().prepareIndex("index_date").setSource("field", "1905-04-11T23:47:17")); indexRandom(true, true, builders); SearchResponse response = client().prepareSearch() .setQuery(matchAllQuery()) @@ -1927,7 +1944,7 @@ public void testCastDate() throws Exception { { builders.clear(); - builders.add(client().prepareIndex("index_date", "_doc").setSource("field", "2346-04-11T23:47:17")); + builders.add(client().prepareIndex("index_date").setSource("field", "2346-04-11T23:47:17")); indexRandom(true, true, builders); SearchResponse response = client().prepareSearch() .setQuery(QueryBuilders.rangeQuery("field").gt("1970-01-01")) @@ -1972,7 +1989,7 @@ public void testLongSortOptimizationCorrectResults() { bulkBuilder = client().prepareBulk(); } String source = "{\"long_field\":" + randomLong() + "}"; - bulkBuilder.add(client().prepareIndex("test1", "_doc").setId(Integer.toString(i)).setSource(source, XContentType.JSON)); + bulkBuilder.add(client().prepareIndex("test1").setId(Integer.toString(i)).setSource(source, XContentType.JSON)); } refresh(); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java index f964baead2534..1739add2ff5e8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java @@ -80,7 +80,8 @@ public void testDistanceSortingMVFields() throws Exception { assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("names", "New York") @@ -92,7 +93,8 @@ public void testDistanceSortingMVFields() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("names", "New York 2") @@ -104,7 +106,8 @@ public void testDistanceSortingMVFields() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .array("names", "Times Square", "Tribeca") @@ -124,7 +127,8 @@ public void testDistanceSortingMVFields() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "4") + client().prepareIndex("test") + .setId("4") .setSource( jsonBuilder().startObject() .array("names", "Wall Street", "Soho") @@ -144,7 +148,8 @@ public void testDistanceSortingMVFields() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "5") + client().prepareIndex("test") + .setId("5") .setSource( jsonBuilder().startObject() .array("names", "Greenwich Village", "Brooklyn") @@ -271,7 +276,8 @@ public void testDistanceSortingWithMissingGeoPoint() throws Exception { assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .array("names", "Times Square", "Tribeca") @@ -291,7 +297,8 @@ public void testDistanceSortingWithMissingGeoPoint() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().array("names", "Wall Street", "Soho").endObject()) .get(); @@ -346,7 +353,8 @@ public void testDistanceSortingNestedFields() throws Exception { indexRandom( true, - client().prepareIndex("companies", "company", "1") + client().prepareIndex("companies") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "company 1") @@ -361,7 +369,8 @@ public void testDistanceSortingNestedFields() throws Exception { .endArray() .endObject() ), - client().prepareIndex("companies", "company", "2") + client().prepareIndex("companies") + .setId("2") .setSource( jsonBuilder().startObject() .field("name", "company 2") @@ -385,7 +394,8 @@ public void testDistanceSortingNestedFields() throws Exception { .endArray() .endObject() ), - client().prepareIndex("companies", "company", "3") + client().prepareIndex("companies") + .setId("3") .setSource( jsonBuilder().startObject() .field("name", "company 3") @@ -408,7 +418,8 @@ public void testDistanceSortingNestedFields() throws Exception { .endArray() .endObject() ), - client().prepareIndex("companies", "company", "4") + client().prepareIndex("companies") + .setId("4") .setSource( 
jsonBuilder().startObject() .field("name", "company 4") @@ -588,7 +599,7 @@ public void testGeoDistanceFilter() throws IOException { XContentBuilder source = JsonXContent.contentBuilder().startObject().field("pin", Geohash.stringEncode(lon, lat)).endObject(); assertAcked(prepareCreate("locations").setSettings(settings).addMapping("location", mapping)); - client().prepareIndex("locations", "location", "1").setCreate(true).setSource(source).get(); + client().prepareIndex("locations").setId("1").setCreate(true).setSource(source).get(); refresh(); client().prepareGet("locations", "1").get(); @@ -612,7 +623,8 @@ public void testDistanceSortingWithUnmappedField() throws Exception { assertAcked(prepareCreate("test2")); ensureGreen(); - client().prepareIndex("test1", "type1", "1") + client().prepareIndex("test1") + .setId("1") .setSource( jsonBuilder().startObject() .array("names", "Times Square", "Tribeca") @@ -632,7 +644,8 @@ public void testDistanceSortingWithUnmappedField() throws Exception { ) .get(); - client().prepareIndex("test2", "type1", "2") + client().prepareIndex("test2") + .setId("2") .setSource(jsonBuilder().startObject().array("names", "Wall Street", "Soho").endObject()) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java index 74204950a11c9..c283444666f0b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java @@ -96,8 +96,8 @@ public void testManyToManyGeoPoints() throws ExecutionException, InterruptedExce logger.info("d2: {}", d2Builder); indexRandom( true, - client().prepareIndex("index", "type", "d1").setSource(d1Builder), - client().prepareIndex("index", "type", "d2").setSource(d2Builder) + client().prepareIndex("index").setId("d1").setSource(d1Builder), + client().prepareIndex("index").setId("d2").setSource(d2Builder) ); GeoPoint[] q = new GeoPoint[2]; if (randomBoolean()) { @@ -187,8 +187,8 @@ public void testSingeToManyAvgMedian() throws ExecutionException, InterruptedExc logger.info("d2: {}", d2Builder); indexRandom( true, - client().prepareIndex("index", "type", "d1").setSource(d1Builder), - client().prepareIndex("index", "type", "d2").setSource(d2Builder) + client().prepareIndex("index").setId("d1").setSource(d1Builder), + client().prepareIndex("index").setId("d2").setSource(d2Builder) ); GeoPoint q = new GeoPoint(0, 0); @@ -259,8 +259,8 @@ public void testManyToManyGeoPointsWithDifferentFormats() throws ExecutionExcept indexRandom( true, - client().prepareIndex("index", "type", "d1").setSource(d1Builder), - client().prepareIndex("index", "type", "d2").setSource(d2Builder) + client().prepareIndex("index").setId("d1").setSource(d1Builder), + client().prepareIndex("index").setId("d2").setSource(d2Builder) ); List qPoints = Arrays.asList(new GeoPoint(2, 1), new GeoPoint(2, 2), new GeoPoint(2, 3), new GeoPoint(2, 4)); @@ -309,9 +309,11 @@ public void testSinglePointGeoDistanceSort() throws ExecutionException, Interrup assertAcked(prepareCreate("index").addMapping("type", LOCATION_FIELD, "type=geo_point")); indexRandom( true, - client().prepareIndex("index", "type", "d1") + client().prepareIndex("index") + .setId("d1") .setSource(jsonBuilder().startObject().startObject(LOCATION_FIELD).field("lat", 1).field("lon", 1).endObject().endObject()), - client().prepareIndex("index", 
"type", "d2") + client().prepareIndex("index") + .setId("d2") .setSource(jsonBuilder().startObject().startObject(LOCATION_FIELD).field("lat", 1).field("lon", 2).endObject().endObject()) ); @@ -387,8 +389,8 @@ public void testCrossIndexIgnoreUnmapped() throws Exception { indexRandom( true, - client().prepareIndex("test1", "type").setSource("str_field", "bcd", "long_field", 3, "double_field", 0.65), - client().prepareIndex("test2", "type").setSource() + client().prepareIndex("test1").setSource("str_field", "bcd", "long_field", 3, "double_field", 0.65), + client().prepareIndex("test2").setSource() ); SearchResponse resp = client().prepareSearch("test1", "test2") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java index 62271cb023fde..b4f511c3be123 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java @@ -173,7 +173,8 @@ public void testSimpleSorts() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 10; i++) { builders.add( - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("str_value", new String(new char[] { (char) (97 + i), (char) (97 + i) })) @@ -265,7 +266,8 @@ public void testSortMinValueScript() throws IOException { ensureGreen(); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", "" + i) + client().prepareIndex("test") + .setId("" + i) .setSource( jsonBuilder().startObject() .field("ord", i) @@ -282,7 +284,7 @@ public void testSortMinValueScript() throws IOException { } for (int i = 10; i < 20; i++) { // add some docs that don't have values in those fields - client().prepareIndex("test", "type1", "" + i).setSource(jsonBuilder().startObject().field("ord", i).endObject()).get(); + client().prepareIndex("test").setId("" + i).setSource(jsonBuilder().startObject().field("ord", i).endObject()).get(); } client().admin().indices().prepareRefresh("test").get(); @@ -372,17 +374,11 @@ public void testDocumentsWithNullValue() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); ensureGreen(); - client().prepareIndex("test", "type1") - .setSource(jsonBuilder().startObject().field("id", "1").field("svalue", "aaa").endObject()) - .get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("id", "1").field("svalue", "aaa").endObject()).get(); - client().prepareIndex("test", "type1") - .setSource(jsonBuilder().startObject().field("id", "2").nullField("svalue").endObject()) - .get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("id", "2").nullField("svalue").endObject()).get(); - client().prepareIndex("test", "type1") - .setSource(jsonBuilder().startObject().field("id", "3").field("svalue", "bbb").endObject()) - .get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("id", "3").field("svalue", "bbb").endObject()).get(); flush(); refresh(); @@ -470,7 +466,8 @@ public void test2920() throws IOException { ); ensureGreen(); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "test", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("value", "" + i).endObject()) .get(); } diff 
--git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java index b36168f2a110f..e9fc1c54ad234 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java @@ -33,9 +33,9 @@ public void testPluginSort() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", 2).get(); - client().prepareIndex("test", "type", "2").setSource("field", 1).get(); - client().prepareIndex("test", "type", "3").setSource("field", 0).get(); + client().prepareIndex("test").setId("1").setSource("field", 2).get(); + client().prepareIndex("test").setId("2").setSource("field", 1).get(); + client().prepareIndex("test").setId("3").setSource("field", 0).get(); refresh(); @@ -54,9 +54,9 @@ public void testPluginSortXContent() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", 2).get(); - client().prepareIndex("test", "type", "2").setSource("field", 1).get(); - client().prepareIndex("test", "type", "3").setSource("field", 0).get(); + client().prepareIndex("test").setId("1").setSource("field", 2).get(); + client().prepareIndex("test").setId("2").setSource("field", 1).get(); + client().prepareIndex("test").setId("3").setSource("field", 0).get(); refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java index 438089ba65bd0..758d749f0be8e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java @@ -55,7 +55,7 @@ public void testSimple() { assertAcked(prepareCreate("test")); ensureGreen(); - client().prepareIndex("test", "_doc", "1").setSource("field", "value").get(); + client().prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); SearchResponse response = client().prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true).get(); @@ -71,7 +71,7 @@ public void testSimple() { public void testInnerHits() { assertAcked(prepareCreate("test").addMapping("_doc", "nested", "type=nested")); ensureGreen(); - client().prepareIndex("test", "_doc", "1").setSource("field", "value", "nested", Collections.singletonMap("title", "foo")).get(); + client().prepareIndex("test").setId("1").setSource("field", "value", "nested", Collections.singletonMap("title", "foo")).get(); refresh(); SearchResponse response = client().prepareSearch("test") @@ -98,7 +98,7 @@ public void testWithRouting() { assertAcked(prepareCreate("test")); ensureGreen(); - client().prepareIndex("test", "_doc", "1").setSource("field", "value").setRouting("toto").get(); + client().prepareIndex("test").setId("1").setSource("field", "value").setRouting("toto").get(); refresh(); SearchResponse response = client().prepareSearch("test").storedFields("_none_").setFetchSource(false).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java index 266cccc08ef18..11223d11ff30d 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java @@ -62,7 +62,7 @@ public void testSourceFiltering() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("field1", "value", "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value", "field2", "value2").get(); refresh(); SearchResponse response = client().prepareSearch("test").setFetchSource(false).get(); @@ -95,7 +95,7 @@ public void testSourceWithWildcardFiltering() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("field", "value").get(); + client().prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); SearchResponse response = client().prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java index 66c56f654e34f..c72b5d40553b3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java @@ -111,7 +111,7 @@ public void testSimpleStats() throws Exception { ); int docsTest1 = scaledRandomIntBetween(3 * shardsIdx1, 5 * shardsIdx1); for (int i = 0; i < docsTest1; i++) { - client().prepareIndex("test1", "type", Integer.toString(i)).setSource("field", "value").get(); + client().prepareIndex("test1").setId(Integer.toString(i)).setSource("field", "value").get(); if (rarely()) { refresh(); } @@ -123,7 +123,7 @@ public void testSimpleStats() throws Exception { ); int docsTest2 = scaledRandomIntBetween(3 * shardsIdx2, 5 * shardsIdx2); for (int i = 0; i < docsTest2; i++) { - client().prepareIndex("test2", "type", Integer.toString(i)).setSource("field", "value").get(); + client().prepareIndex("test2").setId(Integer.toString(i)).setSource("field", "value").get(); if (rarely()) { refresh(); } @@ -207,7 +207,8 @@ public void testOpenContexts() { final int docs = scaledRandomIntBetween(20, 50); for (int s = 0; s < numAssignedShards(index); s++) { for (int i = 0; i < docs; i++) { - client().prepareIndex(index, "type", Integer.toString(s * docs + i)) + client().prepareIndex(index) + .setId(Integer.toString(s * docs + i)) .setSource("field", "value") .setRouting(Integer.toString(s)) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java index c42602fc6c569..099ffbc278f81 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java @@ -49,6 +49,7 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.index.mapper.MapperParsingException; +import org.opensearch.index.mapper.MapperService; import org.opensearch.plugins.Plugin; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; @@ -96,7 +97,6 @@ @SuppressCodecs("*") // requires custom completion format public class 
CompletionSuggestSearchIT extends OpenSearchIntegTestCase { private final String INDEX = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); - private final String TYPE = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); private final String FIELD = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); private final CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder(); @@ -117,7 +117,8 @@ public void testTieBreak() throws Exception { String value = "a" + randomAlphaOfLengthBetween(1, 10); entries[i] = value; indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject().startObject(FIELD).field("input", value).field("weight", 10).endObject().endObject() ) @@ -139,7 +140,8 @@ public void testPrefix() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -166,7 +168,8 @@ public void testTextAndGlobalText() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -202,7 +205,8 @@ public void testRegex() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -225,7 +229,8 @@ public void testFuzzy() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -248,7 +253,8 @@ public void testEarlyTermination() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -282,7 +288,8 @@ public void testSuggestDocument() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -317,7 +324,8 @@ public void testSuggestDocumentNoSource() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -355,7 +363,8 @@ public void testSuggestDocumentSourceFiltering() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -419,7 
+428,8 @@ public void testThatWeightsAreWorking() throws Exception { List similarNames = Arrays.asList("the", "The Prodigy", "The Verve", "The the"); // the weight is 1000 divided by string length, so the results are easy to to check for (String similarName : similarNames) { - client().prepareIndex(INDEX, TYPE, similarName) + client().prepareIndex(INDEX) + .setId(similarName) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -443,7 +453,8 @@ public void testThatWeightMustBeAnInteger() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, - () -> client().prepareIndex(INDEX, TYPE, "1") + () -> client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -462,7 +473,8 @@ public void testThatWeightMustBeAnInteger() throws Exception { public void testThatWeightCanBeAString() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -500,7 +512,8 @@ public void testThatWeightMustNotBeANonNumberString() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, - () -> client().prepareIndex(INDEX, TYPE, "1") + () -> client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -523,7 +536,8 @@ public void testThatWeightAsStringMustBeInt() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, - () -> client().prepareIndex(INDEX, TYPE, "1") + () -> client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -542,7 +556,8 @@ public void testThatWeightAsStringMustBeInt() throws Exception { public void testThatInputCanBeAStringInsteadOfAnArray() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource(jsonBuilder().startObject().startObject(FIELD).field("input", "Foo Fighters").endObject().endObject()) .get(); @@ -555,7 +570,8 @@ public void testDisabledPreserveSeparators() throws Exception { completionMappingBuilder.preserveSeparators(false); createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -568,7 +584,8 @@ public void testDisabledPreserveSeparators() throws Exception { ) .get(); - client().prepareIndex(INDEX, TYPE, "2") + client().prepareIndex(INDEX) + .setId("2") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -590,13 +607,15 @@ public void testEnabledPreserveSeparators() throws Exception { completionMappingBuilder.preserveSeparators(true); createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Foo Fighters").endArray().endObject().endObject() ) .get(); - client().prepareIndex(INDEX, TYPE, "2") + client().prepareIndex(INDEX) + .setId("2") .setSource(jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Foof").endArray().endObject().endObject()) .get(); @@ -608,7 +627,8 @@ public void testEnabledPreserveSeparators() throws Exception { public void testThatMultipleInputsAreSupported() throws Exception { 
createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -630,7 +650,8 @@ public void testThatMultipleInputsAreSupported() throws Exception { public void testThatShortSyntaxIsWorking() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startArray(FIELD).value("The Prodigy Firestarter").value("Firestarter").endArray().endObject() ) @@ -647,7 +668,8 @@ public void testThatDisablingPositionIncrementsWorkForStopwords() throws Excepti completionMappingBuilder.searchAnalyzer("classic").indexAnalyzer("classic").preservePositionIncrements(false); createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("The Beatles").endArray().endObject().endObject() ) @@ -660,7 +682,7 @@ public void testThatDisablingPositionIncrementsWorkForStopwords() throws Excepti public void testThatUpgradeToMultiFieldsWorks() throws Exception { final XContentBuilder mapping = jsonBuilder().startObject() - .startObject(TYPE) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "text") @@ -668,8 +690,9 @@ public void testThatUpgradeToMultiFieldsWorks() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate(INDEX).addMapping(TYPE, mapping)); - client().prepareIndex(INDEX, TYPE, "1") + assertAcked(prepareCreate(INDEX).addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); + client().prepareIndex(INDEX) + .setId("1") .setRefreshPolicy(IMMEDIATE) .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()) .get(); @@ -678,10 +701,9 @@ public void testThatUpgradeToMultiFieldsWorks() throws Exception { AcknowledgedResponse putMappingResponse = client().admin() .indices() .preparePutMapping(INDEX) - .setType(TYPE) .setSource( jsonBuilder().startObject() - .startObject(TYPE) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "text") @@ -706,7 +728,8 @@ public void testThatUpgradeToMultiFieldsWorks() throws Exception { .get(); assertSuggestions(searchResponse, "suggs"); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setRefreshPolicy(IMMEDIATE) .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()) .get(); @@ -723,7 +746,8 @@ public void testThatUpgradeToMultiFieldsWorks() throws Exception { public void testThatFuzzySuggesterWorks() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -750,7 +774,8 @@ public void testThatFuzzySuggesterWorks() throws Exception { public void testThatFuzzySuggesterSupportsEditDistances() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -784,7 +809,8 @@ 
public void testThatFuzzySuggesterSupportsEditDistances() throws Exception { public void testThatFuzzySuggesterSupportsTranspositions() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -818,7 +844,8 @@ public void testThatFuzzySuggesterSupportsTranspositions() throws Exception { public void testThatFuzzySuggesterSupportsMinPrefixLength() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -854,7 +881,8 @@ public void testThatFuzzySuggesterSupportsMinPrefixLength() throws Exception { public void testThatFuzzySuggesterSupportsNonPrefixLength() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -890,7 +918,8 @@ public void testThatFuzzySuggesterSupportsNonPrefixLength() throws Exception { public void testThatFuzzySuggesterIsUnicodeAware() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource(jsonBuilder().startObject().startObject(FIELD).startArray("input").value("ööööö").endArray().endObject().endObject()) .get(); @@ -937,10 +966,9 @@ public void testThatStatsAreWorking() throws Exception { AcknowledgedResponse putMappingResponse = client().admin() .indices() .preparePutMapping(INDEX) - .setType(TYPE) .setSource( jsonBuilder().startObject() - .startObject(TYPE) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "completion") @@ -958,10 +986,12 @@ public void testThatStatsAreWorking() throws Exception { assertThat(putMappingResponse.isAcknowledged(), is(true)); // Index two entities - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").field(otherField, "WHATEVER").endObject()) .get(); - client().prepareIndex(INDEX, TYPE, "2") + client().prepareIndex(INDEX) + .setId("2") .setSource(jsonBuilder().startObject().field(FIELD, "Bar Fighters").field(otherField, "WHATEVER2").endObject()) .get(); @@ -1021,7 +1051,8 @@ public void testThatStatsAreWorking() throws Exception { public void testThatSortingOnCompletionFieldReturnsUsefulException() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -1050,7 +1081,8 @@ public void testThatSuggestStopFilterWorks() throws Exception { completionMappingBuilder.indexAnalyzer("simple"); createIndexAndMappingAndSettings(settingsBuilder.build(), completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1064,7 +1096,8 @@ public void 
testThatSuggestStopFilterWorks() throws Exception { .get(); // Higher weight so it's ranked first: - client().prepareIndex(INDEX, TYPE, "2") + client().prepareIndex(INDEX) + .setId("2") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1096,7 +1129,8 @@ public void testThatIndexingInvalidFieldsInCompletionFieldResultsInException() t createIndexAndMapping(completionMappingBuilder); try { - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1128,7 +1162,7 @@ public void testSkipDuplicates() throws Exception { weights[id] = Math.max(weight, weights[id]); String suggestion = "suggestion-" + String.format(Locale.ENGLISH, "%03d", id); indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE) + client().prepareIndex(INDEX) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1252,7 +1286,6 @@ private static List getNames(Suggest.Suggestion.Entry client().prepareIndex(INDEX, TYPE, "1") + () -> client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1414,9 +1450,9 @@ public void testIssue5930() throws IOException { .indices() .prepareCreate(INDEX) .addMapping( - TYPE, + MapperService.SINGLE_MAPPING_NAME, jsonBuilder().startObject() - .startObject(TYPE) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "completion") @@ -1428,7 +1464,8 @@ public void testIssue5930() throws IOException { .get() ); String string = "foo bar"; - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource(jsonBuilder().startObject().field(FIELD, string).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -1451,7 +1488,8 @@ public void testMultiDocSuggestions() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1470,7 +1508,7 @@ public void testMultiDocSuggestions() throws Exception { public void testSuggestWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject(TYPE) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "completion") @@ -1482,12 +1520,12 @@ public void testSuggestWithFieldAlias() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate(INDEX).addMapping(TYPE, mapping)); + assertAcked(prepareCreate(INDEX).addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); List builders = new ArrayList<>(); - builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "apple")); - builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "mango")); - builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "papaya")); + builders.add(client().prepareIndex(INDEX).setSource(FIELD, "apple")); + builders.add(client().prepareIndex(INDEX).setSource(FIELD, "mango")); + builders.add(client().prepareIndex(INDEX).setSource(FIELD, "papaya")); indexRandom(true, false, builders); CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("alias").text("app"); @@ -1501,7 +1539,8 @@ public void testSuggestOnlyExplain() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { 
indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java index 01c867f487a3f..c9b14993d6e49 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -42,6 +42,7 @@ import org.opensearch.common.unit.Fuzziness; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.RestStatus; import org.opensearch.search.suggest.CompletionSuggestSearchIT.CompletionMappingBuilder; import org.opensearch.search.suggest.completion.CompletionSuggestionBuilder; @@ -72,7 +73,6 @@ public class ContextCompletionSuggestSearchIT extends OpenSearchIntegTestCase { private final String INDEX = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); - private final String TYPE = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); private final String FIELD = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); @Override @@ -102,7 +102,7 @@ public void testContextPrefix() throws Exception { source.field("type", "type" + i % 3); } source.endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD) @@ -138,7 +138,7 @@ public void testContextRegex() throws Exception { source.field("type", "type" + i % 3); } source.endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD) @@ -174,7 +174,7 @@ public void testContextFuzzy() throws Exception { source.field("type", "type" + i % 3); } source.endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD) @@ -193,7 +193,8 @@ public void testContextFilteringWorksWithUTF8Categories() throws Exception { LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); - IndexResponse indexResponse = client().prepareIndex(INDEX, TYPE, "1") + IndexResponse indexResponse = client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -222,7 +223,8 @@ public void testSingleContextFiltering() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + 
i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -253,7 +255,8 @@ public void testSingleContextBoosting() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -297,7 +300,7 @@ public void testMultiContextFiltering() throws Exception { .field("cat", "cat" + i % 2) .field("type", "type" + i % 4) .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -339,7 +342,7 @@ public void testMultiContextBoosting() throws Exception { .field("cat", "cat" + i % 2) .field("type", "type" + i % 4) .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -412,7 +415,7 @@ public void testSeveralContexts() throws Exception { source.field("type" + c, "type" + c + i % 4); } source.endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -445,7 +448,7 @@ public void testGeoFiltering() throws Exception { .endObject() .endObject() .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -479,7 +482,7 @@ public void testGeoBoosting() throws Exception { .endObject() .endObject() .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -512,7 +515,7 @@ public void testGeoPointContext() throws Exception { .endObject() .endObject() .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD) @@ -554,7 +557,7 @@ public void testGeoNeighbours() throws Exception { .endObject() .endObject() .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -573,7 +576,6 @@ public void testGeoNeighbours() throws Exception { public void testGeoField() throws Exception { XContentBuilder mapping = jsonBuilder(); mapping.startObject(); - mapping.startObject(TYPE); mapping.startObject("properties"); mapping.startObject("location"); mapping.startObject("properties"); @@ -605,9 +607,8 @@ public void testGeoField() throws Exception { mapping.endObject(); mapping.endObject(); mapping.endObject(); - mapping.endObject(); - assertAcked(prepareCreate(INDEX).addMapping(TYPE, mapping)); + 
assertAcked(prepareCreate(INDEX).addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); XContentBuilder source1 = jsonBuilder().startObject() .startObject("location") @@ -617,7 +618,7 @@ public void testGeoField() throws Exception { .array("input", "Hotel Amsterdam in Berlin") .endObject() .endObject(); - client().prepareIndex(INDEX, TYPE, "1").setSource(source1).get(); + client().prepareIndex(INDEX).setId("1").setSource(source1).get(); XContentBuilder source2 = jsonBuilder().startObject() .startObject("location") @@ -627,7 +628,7 @@ public void testGeoField() throws Exception { .array("input", "Hotel Berlin in Amsterdam") .endObject() .endObject(); - client().prepareIndex(INDEX, TYPE, "2").setSource(source2).get(); + client().prepareIndex(INDEX).setId("2").setSource(source2).get(); refresh(); @@ -671,7 +672,7 @@ public void testSkipDuplicatesWithContexts() throws Exception { .field("cat", "cat" + id % 2) .field("type", "type" + id) .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } String[] expected = new String[numUnique]; for (int i = 0; i < numUnique; i++) { @@ -705,7 +706,6 @@ private void createIndexAndMapping(CompletionMappingBuilder completionMappingBui private void createIndexAndMappingAndSettings(Settings settings, CompletionMappingBuilder completionMappingBuilder) throws IOException { XContentBuilder mapping = jsonBuilder().startObject() - .startObject(TYPE) .startObject("properties") .startObject(FIELD) .field("type", "completion") @@ -747,14 +747,14 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi for (String fieldName : categoryContextFields) { mapping.startObject(fieldName).field("type", randomBoolean() ? 
"keyword" : "text").endObject(); } - mapping.endObject().endObject().endObject(); + mapping.endObject().endObject(); assertAcked( client().admin() .indices() .prepareCreate(INDEX) .setSettings(Settings.builder().put(indexSettings()).put(settings)) - .addMapping(TYPE, mapping) + .addMapping(MapperService.SINGLE_MAPPING_NAME, mapping) .get() ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java index 4e116a7be140d..bb6e1643dd767 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java @@ -320,9 +320,9 @@ public void testUnmappedField() throws IOException, InterruptedException, Execut indexRandom( true, - client().prepareIndex("test", "type1").setSource("name", "I like iced tea"), - client().prepareIndex("test", "type1").setSource("name", "I like tea."), - client().prepareIndex("test", "type1").setSource("name", "I like ice cream.") + client().prepareIndex("test").setSource("name", "I like iced tea"), + client().prepareIndex("test").setSource("name", "I like tea."), + client().prepareIndex("test").setSource("name", "I like ice cream.") ); refresh(); @@ -804,9 +804,9 @@ public void testDifferentShardSize() throws Exception { ensureGreen(); indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "foobar1").setRouting("1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "foobar2").setRouting("2"), - client().prepareIndex("test", "type1", "3").setSource("field1", "foobar3").setRouting("3") + client().prepareIndex("test").setId("1").setSource("field1", "foobar1").setRouting("1"), + client().prepareIndex("test").setId("2").setSource("field1", "foobar2").setRouting("2"), + client().prepareIndex("test").setId("3").setSource("field1", "foobar3").setRouting("3") ); Suggest suggest = searchSuggest( @@ -1143,7 +1143,7 @@ public void testSuggestWithManyCandidates() throws InterruptedException, Executi List builders = new ArrayList<>(); for (String title : titles) { - builders.add(client().prepareIndex("test", "type1").setSource("title", title)); + builders.add(client().prepareIndex("test").setSource("title", title)); } indexRandom(true, builders); @@ -1181,9 +1181,9 @@ public void testSuggestWithFieldAlias() throws Exception { assertAcked(prepareCreate("test").addMapping("type", mapping)); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("test", "type").setSource("text", "apple")); - builders.add(client().prepareIndex("test", "type").setSource("text", "mango")); - builders.add(client().prepareIndex("test", "type").setSource("text", "papaya")); + builders.add(client().prepareIndex("test").setSource("text", "apple")); + builders.add(client().prepareIndex("test").setSource("text", "mango")); + builders.add(client().prepareIndex("test").setSource("text", "papaya")); indexRandom(true, false, builders); TermSuggestionBuilder termSuggest = termSuggestion("alias").text("appple"); @@ -1208,10 +1208,10 @@ public void testPhraseSuggestMinDocFreq() throws Exception { ); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("test", "type").setSource("text", "apple")); - builders.add(client().prepareIndex("test", "type").setSource("text", "apple")); - builders.add(client().prepareIndex("test", "type").setSource("text", "apple")); - 
builders.add(client().prepareIndex("test", "type").setSource("text", "appfle")); + builders.add(client().prepareIndex("test").setSource("text", "apple")); + builders.add(client().prepareIndex("test").setSource("text", "apple")); + builders.add(client().prepareIndex("test").setSource("text", "apple")); + builders.add(client().prepareIndex("test").setSource("text", "appfle")); indexRandom(true, false, builders); PhraseSuggestionBuilder phraseSuggest = phraseSuggestion("text").text("appple") @@ -1321,7 +1321,7 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE List builders = new ArrayList<>(); for (String title : titles) { - builders.add(client().prepareIndex("test", "type1").setSource("title", title)); + builders.add(client().prepareIndex("test").setSource("title", title)); } indexRandom(true, builders); diff --git a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java index 41d934212193b..57c14876b25ff 100644 --- a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java @@ -81,7 +81,8 @@ public void testCustomBM25Similarity() throws Exception { .execute() .actionGet(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource("field1", "the quick brown fox jumped over the lazy dog", "field2", "the quick brown fox jumped over the lazy dog") .setRefreshPolicy(IMMEDIATE) .execute() diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java index f0673236a8be6..c253f1a4f876e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java @@ -545,7 +545,7 @@ public void testSnapshotWithCorruptedShardIndexFile() throws Exception { final IndexRequestBuilder[] documents = new IndexRequestBuilder[nDocs]; for (int j = 0; j < nDocs; j++) { - documents[j] = client.prepareIndex(indexName, "_doc").setSource("foo", "bar"); + documents[j] = client.prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(true, documents); flushAndRefresh(); @@ -591,7 +591,7 @@ public void testSnapshotWithCorruptedShardIndexFile() throws Exception { logger.info("--> indexing [{}] more documents into [{}]", nDocs, indexName); for (int j = 0; j < nDocs; j++) { - documents[j] = client.prepareIndex(indexName, "_doc").setSource("foo2", "bar2"); + documents[j] = client.prepareIndex(indexName).setSource("foo2", "bar2"); } indexRandom(true, documents); @@ -618,8 +618,8 @@ public void testDeleteSnapshotWithMissingIndexAndShardMetadata() throws Exceptio logger.info("--> indexing some data"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); logger.info("--> creating snapshot"); @@ -671,8 +671,8 @@ public void testDeleteSnapshotWithMissingMetadata() throws Exception { logger.info("--> indexing some data"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - 
client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); logger.info("--> creating snapshot"); @@ -718,8 +718,8 @@ public void testDeleteSnapshotWithCorruptedSnapshotFile() throws Exception { logger.info("--> indexing some data"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); logger.info("--> creating snapshot"); @@ -775,9 +775,9 @@ public void testDeleteSnapshotWithCorruptedGlobalState() throws Exception { createIndex("test-idx-1", "test-idx-2"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); flushAndRefresh("test-idx-1", "test-idx-2"); @@ -823,8 +823,8 @@ public void testSnapshotWithMissingShardLevelIndexFile() throws Exception { logger.info("--> indexing some data"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); logger.info("--> creating snapshot"); diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 93c1f5a9ef398..47d57e1260b5f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -1014,7 +1014,7 @@ public void testSnapshotTotalAndIncrementalSizes() throws Exception { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex(indexName, "type").setSource("test", "init").execute().actionGet(); + client().prepareIndex(indexName).setSource("test", "init").execute().actionGet(); } final Path repoPath = randomRepoPath(); @@ -1047,7 +1047,7 @@ public void testSnapshotTotalAndIncrementalSizes() throws Exception { // add few docs - less than initially docs = between(1, 5); for (int i = 0; i < docs; i++) { - client().prepareIndex(indexName, "type").setSource("test", "test" + i).execute().actionGet(); + client().prepareIndex(indexName).setSource("test", "test" + i).execute().actionGet(); } // create another snapshot @@ -1099,7 +1099,7 @@ public void testDeduplicateIndexMetadata() throws Exception { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex(indexName, "_doc").setSource("test", "init").execute().actionGet(); + client().prepareIndex(indexName).setSource("test", "init").execute().actionGet(); } final Path repoPath = randomRepoPath(); @@ -1111,7 +1111,7 @@ public void testDeduplicateIndexMetadata() throws Exception { docs = between(1, 5); for (int i = 0; i < docs; i++) { - client().prepareIndex(indexName, 
"_doc").setSource("test", "test" + i).execute().actionGet(); + client().prepareIndex(indexName).setSource("test", "test" + i).execute().actionGet(); } logger.info("--> restart random data node and add new data node to change index allocation"); @@ -1131,7 +1131,7 @@ public void testDeduplicateIndexMetadata() throws Exception { // index to some other field to trigger a change in index metadata for (int i = 0; i < docs; i++) { - client().prepareIndex(indexName, "_doc").setSource("new_field", "test" + i).execute().actionGet(); + client().prepareIndex(indexName).setSource("new_field", "test" + i).execute().actionGet(); } createFullSnapshot(repositoryName, snapshot2); @@ -1268,7 +1268,7 @@ public void testRetentionLeasesClearedOnRestore() throws Exception { logger.debug("--> indexing {} docs into {}", snapshotDocCount, indexName); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[snapshotDocCount]; for (int i = 0; i < snapshotDocCount; i++) { - indexRequestBuilders[i] = client().prepareIndex(indexName, "_doc").setSource("field", "value"); + indexRequestBuilders[i] = client().prepareIndex(indexName).setSource("field", "value"); } indexRandom(true, indexRequestBuilders); assertDocCount(indexName, snapshotDocCount); @@ -1293,7 +1293,7 @@ public void testRetentionLeasesClearedOnRestore() throws Exception { logger.debug("--> indexing {} extra docs into {}", extraDocCount, indexName); indexRequestBuilders = new IndexRequestBuilder[extraDocCount]; for (int i = 0; i < extraDocCount; i++) { - indexRequestBuilders[i] = client().prepareIndex(indexName, "_doc").setSource("field", "value"); + indexRequestBuilders[i] = client().prepareIndex(indexName).setSource("field", "value"); } indexRandom(true, indexRequestBuilders); } diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java index aad0f2576d2a3..608a439b40fec 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java @@ -81,11 +81,11 @@ public void testWhenMetadataAreLoaded() throws Exception { createIndex("docs"); indexRandom( true, - client().prepareIndex("docs", "doc", "1").setSource("rank", 1), - client().prepareIndex("docs", "doc", "2").setSource("rank", 2), - client().prepareIndex("docs", "doc", "3").setSource("rank", 3), - client().prepareIndex("others", "other").setSource("rank", 4), - client().prepareIndex("others", "other").setSource("rank", 5) + client().prepareIndex("docs").setId("1").setSource("rank", 1), + client().prepareIndex("docs").setId("2").setSource("rank", 2), + client().prepareIndex("docs").setId("3").setSource("rank", 3), + client().prepareIndex("others").setSource("rank", 4), + client().prepareIndex("others").setSource("rank", 5) ); createRepository("repository", CountingMockRepositoryPlugin.TYPE); diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java index a131ab9ff70e7..643a301c025c3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java @@ -284,7 +284,7 @@ public void testRestoreWithDifferentMappingsAndSettings() throws 
Exception { NumShards numShards = getNumShards("test-idx"); - assertAcked(client().admin().indices().preparePutMapping("test-idx").setType("_doc").setSource("baz", "type=text")); + assertAcked(client().admin().indices().preparePutMapping("test-idx").setSource("baz", "type=text")); ensureGreen(); logger.info("--> snapshot it"); @@ -310,7 +310,7 @@ public void testRestoreWithDifferentMappingsAndSettings() throws Exception { .put("refresh_interval", 5, TimeUnit.SECONDS) ) ); - assertAcked(client().admin().indices().preparePutMapping("test-idx").setType("_doc").setSource("foo", "type=text")); + assertAcked(client().admin().indices().preparePutMapping("test-idx").setSource("foo", "type=text")); ensureGreen(); logger.info("--> close index"); @@ -735,13 +735,12 @@ public void testChangeSettingsOnRestore() throws Exception { client().admin() .indices() .preparePutMapping("test-idx") - .setType("_doc") .setSource("field1", "type=text,analyzer=standard,search_analyzer=my_analyzer") ); final int numdocs = randomIntBetween(10, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numdocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test-idx", "_doc").setId(Integer.toString(i)).setSource("field1", "Foo bar " + i); + builders[i] = client().prepareIndex("test-idx").setId(Integer.toString(i)).setSource("field1", "Foo bar " + i); } indexRandom(true, builders); flushAndRefresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java index f3190585cff85..88fcd075a563f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -201,13 +201,13 @@ public void testBasicWorkFlow() throws Exception { logger.info("--> delete some data"); for (int i = 0; i < 50; i++) { - client().prepareDelete("test-idx-1", "_doc", Integer.toString(i)).get(); + client().prepareDelete("test-idx-1", Integer.toString(i)).get(); } for (int i = 50; i < 100; i++) { - client().prepareDelete("test-idx-2", "_doc", Integer.toString(i)).get(); + client().prepareDelete("test-idx-2", Integer.toString(i)).get(); } for (int i = 0; i < 100; i += 2) { - client().prepareDelete("test-idx-3", "_doc", Integer.toString(i)).get(); + client().prepareDelete("test-idx-3", Integer.toString(i)).get(); } assertAllSuccessful(refresh()); assertDocCount("test-idx-1", 50L); @@ -1400,7 +1400,7 @@ public void testSnapshotMoreThanOnce() throws InterruptedException { } } - client().prepareDelete("test", "_doc", "1").get(); + client().prepareDelete("test", "1").get(); createSnapshot("test-repo", "test-2", Collections.singletonList("test")); assertThat(getSnapshot("test-repo", "test-2").state(), equalTo(SnapshotState.SUCCESS)); { @@ -1643,9 +1643,9 @@ public void testListCorruptedSnapshot() throws Exception { logger.info("--> indexing some data"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-3", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar"), + client().prepareIndex("test-idx-3").setSource("foo", "bar") ); logger.info("--> creating 2 snapshots"); @@ -1708,9 
+1708,9 @@ public void testRestoreSnapshotWithCorruptedGlobalState() throws Exception { createIndex("test-idx-1", "test-idx-2"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); flushAndRefresh("test-idx-1", "test-idx-2"); @@ -1771,7 +1771,7 @@ public void testRestoreSnapshotWithCorruptedIndexMetadata() throws Exception { IndexRequestBuilder[] documents = new IndexRequestBuilder[nbDocs]; for (int j = 0; j < nbDocs; j++) { - documents[j] = client.prepareIndex(indexName, "_doc").setSource("foo", "bar"); + documents[j] = client.prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(true, documents); } diff --git a/server/src/internalClusterTest/java/org/opensearch/threadpool/SimpleThreadPoolIT.java b/server/src/internalClusterTest/java/org/opensearch/threadpool/SimpleThreadPoolIT.java index 390600b2667d3..341725866b545 100644 --- a/server/src/internalClusterTest/java/org/opensearch/threadpool/SimpleThreadPoolIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/threadpool/SimpleThreadPoolIT.java @@ -72,7 +72,7 @@ public void testThreadNames() throws Exception { int numDocs = randomIntBetween(2, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; ++i) { - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("str_value", "s" + i) diff --git a/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java b/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java index 8f0188c592527..c6ec91a6ab078 100644 --- a/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java @@ -167,7 +167,7 @@ public void testUpsert() throws Exception { ensureGreen(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setScript(fieldIncScript) .execute() @@ -180,7 +180,7 @@ public void testUpsert() throws Exception { assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("1")); } - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setScript(fieldIncScript) .execute() @@ -209,7 +209,7 @@ public void testScriptedUpsert() throws Exception { // Pay money from what will be a new account and opening balance comes from upsert doc // provided by client - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("balance", openingBalance).endObject()) .setScriptedUpsert(true) .setScript(new Script(ScriptType.INLINE, 
UPDATE_SCRIPTS, UPSERT_SCRIPT, params)) @@ -224,7 +224,7 @@ public void testScriptedUpsert() throws Exception { } // Now pay money for an existing account where balance is stored in es - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("balance", openingBalance).endObject()) .setScriptedUpsert(true) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, UPSERT_SCRIPT, params)) @@ -243,7 +243,7 @@ public void testUpsertDoc() throws Exception { createTestIndex(); ensureGreen(); - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setDocAsUpsert(true) .setFetchSource(true) @@ -261,7 +261,7 @@ public void testNotUpsertDoc() throws Exception { ensureGreen(); assertFutureThrows( - client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setDocAsUpsert(false) .setFetchSource(true) @@ -274,7 +274,7 @@ public void testUpsertFields() throws Exception { createTestIndex(); ensureGreen(); - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo"))) .setFetchSource(true) @@ -287,7 +287,7 @@ public void testUpsertFields() throws Exception { assertThat(updateResponse.getGetResult().sourceAsMap().get("bar").toString(), equalTo("baz")); assertThat(updateResponse.getGetResult().sourceAsMap().get("extra"), nullValue()); - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo"))) .setFetchSource(true) @@ -302,7 +302,7 @@ public void testUpsertFields() throws Exception { } public void testIndexAutoCreation() throws Exception { - UpdateResponse updateResponse = client().prepareUpdate("test", "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate("test", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo"))) .setFetchSource(true) @@ -324,16 +324,13 @@ public void testUpdate() throws Exception { Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); DocumentMissingException ex = expectThrows( DocumentMissingException.class, - () -> client().prepareUpdate(indexOrAlias(), "type1", "1").setScript(fieldIncScript).execute().actionGet() + () -> client().prepareUpdate(indexOrAlias(), "1").setScript(fieldIncScript).execute().actionGet() ); assertEquals("[1]: document missing", ex.getMessage()); - client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); + 
client().prepareIndex("test").setId("1").setSource("field", 1).execute().actionGet(); - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(fieldIncScript) - .execute() - .actionGet(); + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1").setScript(fieldIncScript).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(2L)); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); @@ -346,7 +343,7 @@ public void testUpdate() throws Exception { Map params = new HashMap<>(); params.put("inc", 3); params.put("field", "field"); - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, params)) .execute() .actionGet(); @@ -360,7 +357,7 @@ public void testUpdate() throws Exception { } // check noop - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setScript( new Script( ScriptType.INLINE, @@ -381,7 +378,7 @@ public void testUpdate() throws Exception { } // check delete - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setScript( new Script( ScriptType.INLINE, @@ -402,8 +399,8 @@ public void testUpdate() throws Exception { } // check _source parameter - client().prepareIndex("test", "type1", "1").setSource("field1", 1, "field2", 2).execute().actionGet(); - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareIndex("test").setId("1").setSource("field1", 1, "field2", 2).execute().actionGet(); + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field1"))) .setFetchSource("field1", "field2") .get(); @@ -417,8 +414,8 @@ public void testUpdate() throws Exception { // check updates without script // add new field - client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); - client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareIndex("test").setId("1").setSource("field", 1).execute().actionGet(); + client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()) .execute() .actionGet(); @@ -429,7 +426,7 @@ public void testUpdate() throws Exception { } // change existing field - client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()) .execute() .actionGet(); @@ -449,8 +446,8 @@ public void testUpdate() throws Exception { testMap.put("commonkey", testMap2); testMap.put("map1", 8); - client().prepareIndex("test", "type1", "1").setSource("map", testMap).execute().actionGet(); - client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareIndex("test").setId("1").setSource("map", testMap).execute().actionGet(); + client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()) .execute() .actionGet(); @@ -473,10 +470,10 @@ public void testUpdateWithIfSeqNo() throws Exception { createTestIndex(); ensureGreen(); - IndexResponse result = client().prepareIndex("test", "type1", 
"1").setSource("field", 1).get(); + IndexResponse result = client().prepareIndex("test").setId("1").setSource("field", 1).get(); expectThrows( VersionConflictEngineException.class, - () -> client().prepareUpdate(indexOrAlias(), "type1", "1") + () -> client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 2).endObject()) .setIfSeqNo(result.getSeqNo() + 1) .setIfPrimaryTerm(result.getPrimaryTerm()) @@ -485,7 +482,7 @@ public void testUpdateWithIfSeqNo() throws Exception { expectThrows( VersionConflictEngineException.class, - () -> client().prepareUpdate(indexOrAlias(), "type1", "1") + () -> client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 2).endObject()) .setIfSeqNo(result.getSeqNo()) .setIfPrimaryTerm(result.getPrimaryTerm() + 1) @@ -494,14 +491,14 @@ public void testUpdateWithIfSeqNo() throws Exception { expectThrows( VersionConflictEngineException.class, - () -> client().prepareUpdate(indexOrAlias(), "type1", "1") + () -> client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 2).endObject()) .setIfSeqNo(result.getSeqNo() + 1) .setIfPrimaryTerm(result.getPrimaryTerm() + 1) .get() ); - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 2).endObject()) .setIfSeqNo(result.getSeqNo()) .setIfPrimaryTerm(result.getPrimaryTerm()) @@ -517,7 +514,7 @@ public void testUpdateRequestWithBothScriptAndDoc() throws Exception { Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); try { - client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setScript(fieldIncScript) .execute() @@ -535,7 +532,7 @@ public void testUpdateRequestWithScriptAndShouldUpsertDoc() throws Exception { ensureGreen(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); try { - client().prepareUpdate(indexOrAlias(), "type1", "1").setScript(fieldIncScript).setDocAsUpsert(true).execute().actionGet(); + client().prepareUpdate(indexOrAlias(), "1").setScript(fieldIncScript).setDocAsUpsert(true).execute().actionGet(); fail("Should have thrown ActionRequestValidationException"); } catch (ActionRequestValidationException e) { assertThat(e.validationErrors().size(), equalTo(1)); @@ -551,23 +548,16 @@ public void testContextVariables() throws Exception { // Index some documents client().prepareIndex() .setIndex("test") - .setType("type1") .setId("id1") .setRouting("routing1") .setSource("field1", 1, "content", "foo") .execute() .actionGet(); - client().prepareIndex() - .setIndex("test") - .setType("type1") - .setId("id2") - .setSource("field1", 0, "content", "bar") - .execute() - .actionGet(); + client().prepareIndex().setIndex("test").setId("id2").setSource("field1", 0, "content", "bar").execute().actionGet(); // Update the first object and note context variables values - UpdateResponse updateResponse = client().prepareUpdate("test", "type1", "id1") + UpdateResponse updateResponse = client().prepareUpdate("test", "id1") .setRouting("routing1") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, 
EXTRACT_CTX_SCRIPT, Collections.emptyMap())) .execute() @@ -583,7 +573,7 @@ public void testContextVariables() throws Exception { assertEquals("routing1", updateContext.get("_routing")); // Idem with the second object - updateResponse = client().prepareUpdate("test", "type1", "id2") + updateResponse = client().prepareUpdate("test", "id2") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, EXTRACT_CTX_SCRIPT, Collections.emptyMap())) .execute() .actionGet(); @@ -627,17 +617,13 @@ public void run() { ); } if (useBulkApi) { - UpdateRequestBuilder updateRequestBuilder = client().prepareUpdate( - indexOrAlias(), - "type1", - Integer.toString(i) - ) + UpdateRequestBuilder updateRequestBuilder = client().prepareUpdate(indexOrAlias(), Integer.toString(i)) .setScript(fieldIncScript) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()); client().prepareBulk().add(updateRequestBuilder).execute().actionGet(); } else { - client().prepareUpdate(indexOrAlias(), "type1", Integer.toString(i)) + client().prepareUpdate(indexOrAlias(), Integer.toString(i)) .setScript(fieldIncScript) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) @@ -759,7 +745,7 @@ public void run() { for (int k = 0; k < numberOfUpdatesPerId; ++k) { updateRequestsOutstanding.acquire(); try { - UpdateRequest ur = client().prepareUpdate("test", "type1", Integer.toString(j)) + UpdateRequest ur = client().prepareUpdate("test", Integer.toString(j)) .setScript(fieldIncScript) .setRetryOnConflict(retryOnConflict) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) @@ -791,7 +777,7 @@ public void run() { try { deleteRequestsOutstanding.acquire(); - DeleteRequest dr = client().prepareDelete("test", "type1", Integer.toString(j)).request(); + DeleteRequest dr = client().prepareDelete("test", Integer.toString(j)).request(); client().delete(dr, new DeleteListener(j)); } catch (NoNodeAvailableException nne) { deleteRequestsOutstanding.release(); @@ -878,7 +864,7 @@ private void waitForOutstandingRequests(TimeValue timeOut, Semaphore requestsOut // This means that we add 1 to the expected versions and attempts // All the previous operations should be complete or failed at this point for (int i = 0; i < numberOfIdsPerThread; ++i) { - client().prepareUpdate("test", "type1", Integer.toString(i)) + client().prepareUpdate("test", Integer.toString(i)) .setScript(fieldIncScript) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) diff --git a/server/src/internalClusterTest/java/org/opensearch/update/UpdateNoopIT.java b/server/src/internalClusterTest/java/org/opensearch/update/UpdateNoopIT.java index c2eb76a0dbe62..606a5fe1b7eca 100644 --- a/server/src/internalClusterTest/java/org/opensearch/update/UpdateNoopIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/update/UpdateNoopIT.java @@ -327,7 +327,7 @@ private void updateAndCheckSource(long expectedSeqNo, long expectedVersion, Bool } private UpdateResponse update(Boolean detectNoop, long expectedSeqNo, long expectedVersion, XContentBuilder xContentBuilder) { - UpdateRequestBuilder updateRequest = client().prepareUpdate("test", "type1", "1") + UpdateRequestBuilder updateRequest = client().prepareUpdate("test", "1") .setDoc(xContentBuilder) .setDocAsUpsert(true) .setFetchSource(true); diff --git a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java 
b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java index 98063c58b90a6..51d0a4395127a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java @@ -39,6 +39,7 @@ import org.opensearch.common.unit.Fuzziness; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.MoreLikeThisQueryBuilder.Item; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -77,11 +78,10 @@ public void testSimpleValidateQuery() throws Exception { client().admin() .indices() .preparePutMapping("test") - .setType("type1") .setSource( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("foo") .field("type", "text") @@ -179,11 +179,10 @@ public void testExplainValidateQueryTwoNodes() throws IOException { client().admin() .indices() .preparePutMapping("test") - .setType("type1") .setSource( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("foo") .field("type", "text") @@ -255,7 +254,7 @@ public void testExplainDateRangeInQueryString() { String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plus(1, ChronoUnit.MONTHS)); String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minus(1, ChronoUnit.MONTHS)); - client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); + client().prepareIndex("test").setId("1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); refresh(); @@ -319,13 +318,13 @@ public void testExplainWithRewriteValidateQuery() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("type1", "field", "type=text,analyzer=whitespace") + .addMapping(MapperService.SINGLE_MAPPING_NAME, "field", "type=text,analyzer=whitespace") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)) .get(); - client().prepareIndex("test", "type1", "1").setSource("field", "quick lazy huge brown pidgin").get(); - client().prepareIndex("test", "type1", "2").setSource("field", "the quick brown fox").get(); - client().prepareIndex("test", "type1", "3").setSource("field", "the quick lazy huge brown fox jumps over the tree").get(); - client().prepareIndex("test", "type1", "4").setSource("field", "the lazy dog quacks like a duck").get(); + client().prepareIndex("test").setId("1").setSource("field", "quick lazy huge brown pidgin").get(); + client().prepareIndex("test").setId("2").setSource("field", "the quick brown fox").get(); + client().prepareIndex("test").setId("3").setSource("field", "the quick lazy huge brown fox jumps over the tree").get(); + client().prepareIndex("test").setId("4").setSource("field", "the lazy dog quacks like a duck").get(); refresh(); // prefix queries @@ -381,15 +380,15 @@ public void testExplainWithRewriteValidateQueryAllShards() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("type1", "field", "type=text,analyzer=whitespace") + .addMapping(MapperService.SINGLE_MAPPING_NAME, "field", "type=text,analyzer=whitespace") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 
2).put("index.number_of_routing_shards", 2)) .get(); // We are relying on specific routing behaviors for the result to be right, so // we cannot randomize the number of shards or change ids here. - client().prepareIndex("test", "type1", "1").setSource("field", "quick lazy huge brown pidgin").get(); - client().prepareIndex("test", "type1", "2").setSource("field", "the quick brown fox").get(); - client().prepareIndex("test", "type1", "3").setSource("field", "the quick lazy huge brown fox jumps over the tree").get(); - client().prepareIndex("test", "type1", "4").setSource("field", "the lazy dog quacks like a duck").get(); + client().prepareIndex("test").setId("1").setSource("field", "quick lazy huge brown pidgin").get(); + client().prepareIndex("test").setId("2").setSource("field", "the quick brown fox").get(); + client().prepareIndex("test").setId("3").setSource("field", "the quick lazy huge brown fox jumps over the tree").get(); + client().prepareIndex("test").setId("4").setSource("field", "the lazy dog quacks like a duck").get(); refresh(); // prefix queries @@ -447,7 +446,6 @@ private static void assertExplanation(QueryBuilder queryBuilder, Matcher ValidateQueryResponse response = client().admin() .indices() .prepareValidateQuery("test") - .setTypes("type1") .setQuery(queryBuilder) .setExplain(true) .setRewrite(withRewrite) @@ -468,7 +466,6 @@ private static void assertExplanations( ValidateQueryResponse response = client().admin() .indices() .prepareValidateQuery("test") - .setTypes("type1") .setQuery(queryBuilder) .setExplain(true) .setRewrite(withRewrite) @@ -490,14 +487,13 @@ public void testExplainTermsQueryWithLookup() throws Exception { .addMapping("_doc", "user", "type=integer", "followers", "type=integer") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put("index.number_of_routing_shards", 2)) .get(); - client().prepareIndex("twitter", "_doc", "1").setSource("followers", new int[] { 1, 2, 3 }).get(); + client().prepareIndex("twitter").setId("1").setSource("followers", new int[] { 1, 2, 3 }).get(); refresh(); TermsQueryBuilder termsLookupQuery = QueryBuilders.termsLookupQuery("user", new TermsLookup("twitter", "_doc", "1", "followers")); ValidateQueryResponse response = client().admin() .indices() .prepareValidateQuery("twitter") - .setTypes("_doc") .setQuery(termsLookupQuery) .setExplain(true) .execute() diff --git a/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentDocumentOperationIT.java b/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentDocumentOperationIT.java index 81e27c64f821a..e433a489ad572 100644 --- a/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentDocumentOperationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentDocumentOperationIT.java @@ -55,7 +55,7 @@ public void testConcurrentOperationOnSameDoc() throws Exception { final AtomicReference failure = new AtomicReference<>(); final CountDownLatch latch = new CountDownLatch(numberOfUpdates); for (int i = 0; i < numberOfUpdates; i++) { - client().prepareIndex("test", "type1", "1").setSource("field1", i).execute(new ActionListener() { + client().prepareIndex("test").setId("1").setSource("field1", i).execute(new ActionListener() { @Override public void onResponse(IndexResponse response) { latch.countDown(); diff --git a/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentSeqNoVersioningIT.java 
b/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentSeqNoVersioningIT.java index a14e1279c1051..2194152284d37 100644 --- a/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentSeqNoVersioningIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentSeqNoVersioningIT.java @@ -160,7 +160,7 @@ public void testSeqNoCASLinearizability() { logger.info("--> Indexing initial doc for {} keys", numberOfKeys); List partitions = IntStream.range(0, numberOfKeys) - .mapToObj(i -> client().prepareIndex("test", "type", "ID:" + i).setSource("value", -1).get()) + .mapToObj(i -> client().prepareIndex("test").setId("ID:" + i).setSource("value", -1).get()) .map(response -> new Partition(response.getId(), new Version(response.getPrimaryTerm(), response.getSeqNo()))) .collect(Collectors.toList()); diff --git a/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java b/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java index 9cbcc19cb47eb..629b20edbb44d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java @@ -72,7 +72,7 @@ public void testExternalVersioningInitialDelete() throws Exception { // Note - external version doesn't throw version conflicts on deletes of non existent records. // This is different from internal versioning - DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1") + DeleteResponse deleteResponse = client().prepareDelete("test", "1") .setVersion(17) .setVersionType(VersionType.EXTERNAL) .execute() @@ -81,7 +81,8 @@ public void testExternalVersioningInitialDelete() throws Exception { // this should conflict with the delete command transaction which told us that the object was deleted at version 17. 
assertFutureThrows( - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(13) .setVersionType(VersionType.EXTERNAL) @@ -89,7 +90,8 @@ public void testExternalVersioningInitialDelete() throws Exception { VersionConflictEngineException.class ); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(18) .setVersionType(VersionType.EXTERNAL) @@ -101,21 +103,24 @@ public void testExternalVersioningInitialDelete() throws Exception { public void testExternalGTE() throws Exception { createIndex("test"); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(12) .setVersionType(VersionType.EXTERNAL_GTE) .get(); assertThat(indexResponse.getVersion(), equalTo(12L)); - indexResponse = client().prepareIndex("test", "type", "1") + indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_2") .setVersion(12) .setVersionType(VersionType.EXTERNAL_GTE) .get(); assertThat(indexResponse.getVersion(), equalTo(12L)); - indexResponse = client().prepareIndex("test", "type", "1") + indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_2") .setVersion(14) .setVersionType(VersionType.EXTERNAL_GTE) @@ -123,7 +128,8 @@ public void testExternalGTE() throws Exception { assertThat(indexResponse.getVersion(), equalTo(14L)); assertRequestBuilderThrows( - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(13) .setVersionType(VersionType.EXTERNAL_GTE), @@ -140,13 +146,13 @@ public void testExternalGTE() throws Exception { // deleting with a lower version fails. assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setVersion(2).setVersionType(VersionType.EXTERNAL_GTE), + client().prepareDelete("test", "1").setVersion(2).setVersionType(VersionType.EXTERNAL_GTE), VersionConflictEngineException.class ); // Delete with a higher or equal version deletes all versions up to the given one. long v = randomIntBetween(14, 17); - DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1") + DeleteResponse deleteResponse = client().prepareDelete("test", "1") .setVersion(v) .setVersionType(VersionType.EXTERNAL_GTE) .execute() @@ -156,16 +162,12 @@ public void testExternalGTE() throws Exception { // Deleting with a lower version keeps on failing after a delete. assertFutureThrows( - client().prepareDelete("test", "type", "1").setVersion(2).setVersionType(VersionType.EXTERNAL_GTE).execute(), + client().prepareDelete("test", "1").setVersion(2).setVersionType(VersionType.EXTERNAL_GTE).execute(), VersionConflictEngineException.class ); // But delete with a higher version is OK. 
- deleteResponse = client().prepareDelete("test", "type", "1") - .setVersion(18) - .setVersionType(VersionType.EXTERNAL_GTE) - .execute() - .actionGet(); + deleteResponse = client().prepareDelete("test", "1").setVersion(18).setVersionType(VersionType.EXTERNAL_GTE).execute().actionGet(); assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(18L)); } @@ -174,7 +176,8 @@ public void testExternalVersioning() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(12) .setVersionType(VersionType.EXTERNAL) @@ -182,7 +185,8 @@ public void testExternalVersioning() throws Exception { .actionGet(); assertThat(indexResponse.getVersion(), equalTo(12L)); - indexResponse = client().prepareIndex("test", "type", "1") + indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(14) .setVersionType(VersionType.EXTERNAL) @@ -191,7 +195,8 @@ public void testExternalVersioning() throws Exception { assertThat(indexResponse.getVersion(), equalTo(14L)); assertFutureThrows( - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(13) .setVersionType(VersionType.EXTERNAL) @@ -208,12 +213,12 @@ public void testExternalVersioning() throws Exception { // deleting with a lower version fails. assertFutureThrows( - client().prepareDelete("test", "type", "1").setVersion(2).setVersionType(VersionType.EXTERNAL).execute(), + client().prepareDelete("test", "1").setVersion(2).setVersionType(VersionType.EXTERNAL).execute(), VersionConflictEngineException.class ); // Delete with a higher version deletes all versions up to the given one. - DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1") + DeleteResponse deleteResponse = client().prepareDelete("test", "1") .setVersion(17) .setVersionType(VersionType.EXTERNAL) .execute() @@ -223,22 +228,19 @@ public void testExternalVersioning() throws Exception { // Deleting with a lower version keeps on failing after a delete. assertFutureThrows( - client().prepareDelete("test", "type", "1").setVersion(2).setVersionType(VersionType.EXTERNAL).execute(), + client().prepareDelete("test", "1").setVersion(2).setVersionType(VersionType.EXTERNAL).execute(), VersionConflictEngineException.class ); // But delete with a higher version is OK. - deleteResponse = client().prepareDelete("test", "type", "1") - .setVersion(18) - .setVersionType(VersionType.EXTERNAL) - .execute() - .actionGet(); + deleteResponse = client().prepareDelete("test", "1").setVersion(18).setVersionType(VersionType.EXTERNAL).execute().actionGet(); assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(18L)); // TODO: This behavior breaks rest api returning http status 201 // good news is that it this is only the case until deletes GC kicks in. 
- indexResponse = client().prepareIndex("test", "type", "1") + indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(19) .setVersionType(VersionType.EXTERNAL) @@ -246,11 +248,7 @@ public void testExternalVersioning() throws Exception { .actionGet(); assertThat(indexResponse.getVersion(), equalTo(19L)); - deleteResponse = client().prepareDelete("test", "type", "1") - .setVersion(20) - .setVersionType(VersionType.EXTERNAL) - .execute() - .actionGet(); + deleteResponse = client().prepareDelete("test", "1").setVersion(20).setVersionType(VersionType.EXTERNAL).execute().actionGet(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(20L)); @@ -262,7 +260,8 @@ public void testExternalVersioning() throws Exception { Thread.sleep(300); // gc works based on estimated sampled time. Give it a chance... // And now we have previous version return -1 - indexResponse = client().prepareIndex("test", "type", "1") + indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(20) .setVersionType(VersionType.EXTERNAL) @@ -295,11 +294,12 @@ public void testCompareAndSetInitialDelete() throws Exception { ensureGreen(); assertFutureThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(17).setIfPrimaryTerm(10).execute(), + client().prepareDelete("test", "1").setIfSeqNo(17).setIfPrimaryTerm(10).execute(), VersionConflictEngineException.class ); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setCreate(true) .execute() @@ -311,39 +311,39 @@ public void testCompareAndSet() { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet(); + IndexResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); assertThat(indexResponse.getSeqNo(), equalTo(0L)); assertThat(indexResponse.getPrimaryTerm(), equalTo(1L)); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setIfSeqNo(0L).setIfPrimaryTerm(1).get(); + indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_2").setIfSeqNo(0L).setIfPrimaryTerm(1).get(); assertThat(indexResponse.getSeqNo(), equalTo(1L)); assertThat(indexResponse.getPrimaryTerm(), equalTo(1L)); assertFutureThrows( - client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(10).setIfPrimaryTerm(1).execute(), + client().prepareIndex("test").setId("1").setSource("field1", "value1_1").setIfSeqNo(10).setIfPrimaryTerm(1).execute(), VersionConflictEngineException.class ); assertFutureThrows( - client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(10).setIfPrimaryTerm(2).execute(), + client().prepareIndex("test").setId("1").setSource("field1", "value1_1").setIfSeqNo(10).setIfPrimaryTerm(2).execute(), VersionConflictEngineException.class ); assertFutureThrows( - client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(1).setIfPrimaryTerm(2).execute(), + client().prepareIndex("test").setId("1").setSource("field1", "value1_1").setIfSeqNo(1).setIfPrimaryTerm(2).execute(), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", "type", 
"1").setIfSeqNo(10).setIfPrimaryTerm(1), + client().prepareDelete("test", "1").setIfSeqNo(10).setIfPrimaryTerm(1), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(10).setIfPrimaryTerm(2), + client().prepareDelete("test", "1").setIfSeqNo(10).setIfPrimaryTerm(2), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(1).setIfPrimaryTerm(2), + client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(2), VersionConflictEngineException.class ); @@ -367,27 +367,27 @@ public void testCompareAndSet() { assertThat(searchResponse.getHits().getAt(0).getVersion(), equalTo(Versions.NOT_FOUND)); } - DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setIfSeqNo(1).setIfPrimaryTerm(1).get(); + DeleteResponse deleteResponse = client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(1).get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getSeqNo(), equalTo(2L)); assertThat(deleteResponse.getPrimaryTerm(), equalTo(1L)); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(1).setIfPrimaryTerm(1), + client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(1), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(3).setIfPrimaryTerm(12), + client().prepareDelete("test", "1").setIfSeqNo(3).setIfPrimaryTerm(12), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(1).setIfPrimaryTerm(2), + client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(2), VersionConflictEngineException.class ); // the doc is deleted. Even when we hit the deleted seqNo, a conditional delete should fail. 
assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(2).setIfPrimaryTerm(1), + client().prepareDelete("test", "1").setIfSeqNo(2).setIfPrimaryTerm(1), VersionConflictEngineException.class ); } @@ -396,26 +396,26 @@ public void testSimpleVersioningWithFlush() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").get(); + IndexResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").get(); assertThat(indexResponse.getSeqNo(), equalTo(0L)); client().admin().indices().prepareFlush().execute().actionGet(); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setIfSeqNo(0).setIfPrimaryTerm(1).get(); + indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_2").setIfSeqNo(0).setIfPrimaryTerm(1).get(); assertThat(indexResponse.getSeqNo(), equalTo(1L)); client().admin().indices().prepareFlush().execute().actionGet(); assertRequestBuilderThrows( - client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(0).setIfPrimaryTerm(1), + client().prepareIndex("test").setId("1").setSource("field1", "value1_1").setIfSeqNo(0).setIfPrimaryTerm(1), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareIndex("test", "type", "1").setCreate(true).setSource("field1", "value1_1"), + client().prepareIndex("test").setId("1").setCreate(true).setSource("field1", "value1_1"), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(0).setIfPrimaryTerm(1), + client().prepareDelete("test", "1").setIfSeqNo(0).setIfPrimaryTerm(1), VersionConflictEngineException.class ); @@ -443,7 +443,7 @@ public void testVersioningWithBulk() { ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")) + .add(client().prepareIndex("test").setId("1").setSource("field1", "value1_1")) .execute() .actionGet(); assertThat(bulkResponse.hasFailures(), equalTo(false)); @@ -723,7 +723,7 @@ public void run() { long version = idVersion.version; if (idVersion.delete) { try { - idVersion.response = client().prepareDelete("test", "type", id) + idVersion.response = client().prepareDelete("test", id) .setVersion(version) .setVersionType(VersionType.EXTERNAL) .execute() @@ -735,7 +735,8 @@ public void run() { } } else { try { - idVersion.response = client().prepareIndex("test", "type", id) + idVersion.response = client().prepareIndex("test") + .setId(id) .setSource("foo", "bar") .setVersion(version) .setVersionType(VersionType.EXTERNAL) @@ -818,7 +819,8 @@ public void testDeleteNotLost() throws Exception { client().admin().indices().prepareUpdateSettings("test").setSettings(newSettings).execute().actionGet(); // Index a doc: - client().prepareIndex("test", "type", "id") + client().prepareIndex("test") + .setId("id") .setSource("foo", "bar") .setOpType(DocWriteRequest.OpType.INDEX) .setVersion(10) @@ -832,7 +834,7 @@ public void testDeleteNotLost() throws Exception { } // Delete it - client().prepareDelete("test", "type", "id").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); + client().prepareDelete("test", "id").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); // Real-time get should reflect delete: assertThat("doc should have been deleted", 
client().prepareGet("test", "id").execute().actionGet().getVersion(), equalTo(-1L)); @@ -842,7 +844,7 @@ public void testDeleteNotLost() throws Exception { Thread.sleep(1000); // Delete an unrelated doc (provokes pruning deletes from versionMap) - client().prepareDelete("test", "type", "id2").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); + client().prepareDelete("test", "id2").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); // Real-time get should still reflect delete: assertThat("doc should have been deleted", client().prepareGet("test", "id").execute().actionGet().getVersion(), equalTo(-1L)); @@ -858,7 +860,8 @@ public void testGCDeletesZero() throws Exception { client().admin().indices().prepareUpdateSettings("test").setSettings(newSettings).execute().actionGet(); // Index a doc: - client().prepareIndex("test", "type", "id") + client().prepareIndex("test") + .setId("id") .setSource("foo", "bar") .setOpType(DocWriteRequest.OpType.INDEX) .setVersion(10) @@ -872,7 +875,7 @@ public void testGCDeletesZero() throws Exception { } // Delete it - client().prepareDelete("test", "type", "id").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); + client().prepareDelete("test", "id").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); // Real-time get should reflect delete even though index.gc_deletes is 0: assertThat("doc should have been deleted", client().prepareGet("test", "id").execute().actionGet().getVersion(), equalTo(-1L)); @@ -881,29 +884,33 @@ public void testGCDeletesZero() throws Exception { public void testSpecialVersioning() { internalCluster().ensureAtLeastNumDataNodes(2); createIndex("test", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build()); - IndexResponse doc1 = client().prepareIndex("test", "type", "1") + IndexResponse doc1 = client().prepareIndex("test") + .setId("1") .setSource("field", "value1") .setVersion(0) .setVersionType(VersionType.EXTERNAL) .execute() .actionGet(); assertThat(doc1.getVersion(), equalTo(0L)); - IndexResponse doc2 = client().prepareIndex("test", "type", "1") + IndexResponse doc2 = client().prepareIndex("test") + .setId("1") .setSource("field", "value2") .setVersion(Versions.MATCH_ANY) .setVersionType(VersionType.INTERNAL) .execute() .actionGet(); assertThat(doc2.getVersion(), equalTo(1L)); - client().prepareDelete("test", "type", "1").get(); // v2 - IndexResponse doc3 = client().prepareIndex("test", "type", "1") + client().prepareDelete("test", "1").get(); // v2 + IndexResponse doc3 = client().prepareIndex("test") + .setId("1") .setSource("field", "value3") .setVersion(Versions.MATCH_DELETED) .setVersionType(VersionType.INTERNAL) .execute() .actionGet(); assertThat(doc3.getVersion(), equalTo(3L)); - IndexResponse doc4 = client().prepareIndex("test", "type", "1") + IndexResponse doc4 = client().prepareIndex("test") + .setId("1") .setSource("field", "value4") .setVersion(4L) .setVersionType(VersionType.EXTERNAL_GTE) diff --git a/server/src/main/java/org/opensearch/Version.java b/server/src/main/java/org/opensearch/Version.java index 536e450da4a98..88e04a6c5dd77 100644 --- a/server/src/main/java/org/opensearch/Version.java +++ b/server/src/main/java/org/opensearch/Version.java @@ -79,6 +79,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_1_2_4 = new Version(1020499, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_2_5 = new Version(1020599, 
org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_3_0 = new Version(1030099, org.apache.lucene.util.Version.LUCENE_8_10_1); + public static final Version V_1_4_0 = new Version(1040099, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version CURRENT = V_2_0_0; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java index 1d6c093f97b0e..b8a3b284273ae 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java @@ -53,7 +53,6 @@ import org.opensearch.common.xcontent.DeprecationHandler; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContentObject; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; @@ -61,7 +60,6 @@ import org.opensearch.common.xcontent.XContentType; import java.io.IOException; -import java.io.InputStream; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -82,7 +80,7 @@ * @see org.opensearch.client.Requests#createIndexRequest(String) * @see CreateIndexResponse */ -public class CreateIndexRequest extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { +public class CreateIndexRequest extends AcknowledgedRequest implements IndicesRequest { public static final ParseField MAPPINGS = new ParseField("mappings"); public static final ParseField SETTINGS = new ParseField("settings"); @@ -229,7 +227,9 @@ public CreateIndexRequest settings(Map source) { * @param type The mapping type * @param source The mapping source * @param xContentType The content type of the source + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequest mapping(String type, String source, XContentType xContentType) { return mapping(type, new BytesArray(source), xContentType); } @@ -240,7 +240,9 @@ public CreateIndexRequest mapping(String type, String source, XContentType xCont * @param type The mapping type * @param source The mapping source * @param xContentType the content type of the mapping source + * @deprecated types are being removed */ + @Deprecated private CreateIndexRequest mapping(String type, BytesReference source, XContentType xContentType) { Objects.requireNonNull(xContentType); Map mappingAsMap = XContentHelper.convertToMap(source, false, xContentType).v2(); @@ -260,7 +262,9 @@ public CreateIndexRequest cause(String cause) { * * @param type The mapping type * @param source The mapping source + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequest mapping(String type, XContentBuilder source) { return mapping(type, BytesReference.bytes(source), source.contentType()); } @@ -270,7 +274,9 @@ public CreateIndexRequest mapping(String type, XContentBuilder source) { * * @param type The mapping type * @param source The mapping source + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequest mapping(String type, Map source) { if (mappings.containsKey(type)) { throw new IllegalStateException("mappings for type \"" + type + "\" were already 
defined"); @@ -292,9 +298,11 @@ public CreateIndexRequest mapping(String type, Map source) { /** * A specialized simplified mapping source method, takes the form of simple properties definition: * ("field1", "type=string,store=true"). + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequest mapping(String type, Object... source) { - mapping(type, PutMappingRequest.buildFromSimplifiedDef(type, source)); + mapping(type, PutMappingRequest.buildFromSimplifiedDef(source)); return this; } @@ -473,33 +481,4 @@ public void writeTo(StreamOutput out) throws IOException { } waitForActiveShards.writeTo(out); } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - innerToXContent(builder, params); - builder.endObject(); - return builder; - } - - public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(SETTINGS.getPreferredName()); - settings.toXContent(builder, params); - builder.endObject(); - - builder.startObject(MAPPINGS.getPreferredName()); - for (Map.Entry entry : mappings.entrySet()) { - try (InputStream stream = new BytesArray(entry.getValue()).streamInput()) { - builder.rawField(entry.getKey(), stream, XContentType.JSON); - } - } - builder.endObject(); - - builder.startObject(ALIASES.getPreferredName()); - for (Alias alias : aliases) { - alias.toXContent(builder, params); - } - builder.endObject(); - return builder; - } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java index 383945002c56c..94fec1d2a08f2 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -115,6 +115,7 @@ public CreateIndexRequestBuilder setSettings(Map source) { * @param source The mapping source * @param xContentType The content type of the source */ + @Deprecated public CreateIndexRequestBuilder addMapping(String type, String source, XContentType xContentType) { request.mapping(type, source, xContentType); return this; @@ -133,7 +134,9 @@ public CreateIndexRequestBuilder setCause(String cause) { * * @param type The mapping type * @param source The mapping source + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequestBuilder addMapping(String type, XContentBuilder source) { request.mapping(type, source); return this; @@ -144,7 +147,9 @@ public CreateIndexRequestBuilder addMapping(String type, XContentBuilder source) * * @param type The mapping type * @param source The mapping source + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequestBuilder addMapping(String type, Map source) { request.mapping(type, source); return this; @@ -153,7 +158,9 @@ public CreateIndexRequestBuilder addMapping(String type, Map sou /** * A specialized simplified mapping source method, takes the form of simple properties definition: * ("field1", "type=string,store=true"). + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequestBuilder addMapping(String type, Object... 
source) { request.mapping(type, source); return this; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java index 7efe88e9bbc83..4465dc88fe87d 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java @@ -33,8 +33,8 @@ package org.opensearch.action.admin.indices.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; -import org.apache.lucene.util.CollectionUtil; import org.opensearch.LegacyESVersion; +import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.MappingMetadata; @@ -45,27 +45,21 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.ToXContentObject; import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentParser.Token; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.Comparator; import java.util.List; import java.util.Objects; -import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.opensearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - /** * A response for a get index action. */ public class GetIndexResponse extends ActionResponse implements ToXContentObject { - private ImmutableOpenMap> mappings = ImmutableOpenMap.of(); + private ImmutableOpenMap mappings = ImmutableOpenMap.of(); private ImmutableOpenMap> aliases = ImmutableOpenMap.of(); private ImmutableOpenMap settings = ImmutableOpenMap.of(); private ImmutableOpenMap defaultSettings = ImmutableOpenMap.of(); @@ -74,7 +68,7 @@ public class GetIndexResponse extends ActionResponse implements ToXContentObject public GetIndexResponse( String[] indices, - ImmutableOpenMap> mappings, + ImmutableOpenMap mappings, ImmutableOpenMap> aliases, ImmutableOpenMap settings, ImmutableOpenMap defaultSettings, @@ -105,15 +99,26 @@ public GetIndexResponse( this.indices = in.readStringArray(); int mappingsSize = in.readVInt(); - ImmutableOpenMap.Builder> mappingsMapBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder mappingsMapBuilder = ImmutableOpenMap.builder(); for (int i = 0; i < mappingsSize; i++) { - String key = in.readString(); - int valueSize = in.readVInt(); - ImmutableOpenMap.Builder mappingEntryBuilder = ImmutableOpenMap.builder(); - for (int j = 0; j < valueSize; j++) { - mappingEntryBuilder.put(in.readString(), new MappingMetadata(in)); + String index = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + int numMappings = in.readVInt(); + if (numMappings == 0) { + mappingsMapBuilder.put(index, MappingMetadata.EMPTY_MAPPINGS); + } else if (numMappings == 1) { + String type = in.readString(); + if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) { + throw new IllegalStateException("Expected " + MapperService.SINGLE_MAPPING_NAME + " but got [" + type + "]"); + } + mappingsMapBuilder.put(index, new MappingMetadata(in)); + } else { + throw new IllegalStateException("Expected 0 or 1 mappings but got: " + numMappings); + } + } else { 
+ final MappingMetadata metadata = in.readOptionalWriteable(MappingMetadata::new); + mappingsMapBuilder.put(index, metadata != null ? metadata : MappingMetadata.EMPTY_MAPPINGS); } - mappingsMapBuilder.put(key, mappingEntryBuilder.build()); } mappings = mappingsMapBuilder.build(); @@ -163,11 +168,11 @@ public String[] getIndices() { return indices(); } - public ImmutableOpenMap> mappings() { + public ImmutableOpenMap mappings() { return mappings; } - public ImmutableOpenMap> getMappings() { + public ImmutableOpenMap getMappings() { return mappings(); } @@ -235,12 +240,16 @@ public String getSetting(String index, String setting) { public void writeTo(StreamOutput out) throws IOException { out.writeStringArray(indices); out.writeVInt(mappings.size()); - for (ObjectObjectCursor> indexEntry : mappings) { + for (ObjectObjectCursor indexEntry : mappings) { out.writeString(indexEntry.key); - out.writeVInt(indexEntry.value.size()); - for (ObjectObjectCursor mappingEntry : indexEntry.value) { - out.writeString(mappingEntry.key); - mappingEntry.value.writeTo(out); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeVInt(indexEntry.value == MappingMetadata.EMPTY_MAPPINGS ? 0 : 1); + if (indexEntry.value != MappingMetadata.EMPTY_MAPPINGS) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + indexEntry.value.writeTo(out); + } + } else { + out.writeOptionalWriteable(indexEntry.value); } } out.writeVInt(aliases.size()); @@ -286,29 +295,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } builder.endObject(); - ImmutableOpenMap indexMappings = mappings.get(index); - boolean includeTypeName = params.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (includeTypeName) { - builder.startObject("mappings"); - if (indexMappings != null) { - for (final ObjectObjectCursor typeEntry : indexMappings) { - builder.field(typeEntry.key); - builder.map(typeEntry.value.sourceAsMap()); - } - } - builder.endObject(); + MappingMetadata indexMappings = mappings.get(index); + if (indexMappings == null) { + builder.startObject("mappings").endObject(); } else { - MappingMetadata mappings = null; - for (final ObjectObjectCursor typeEntry : indexMappings) { - assert mappings == null; - mappings = typeEntry.value; - } - if (mappings == null) { - // no mappings yet - builder.startObject("mappings").endObject(); - } else { - builder.field("mappings", mappings.sourceAsMap()); - } + builder.field("mappings", indexMappings.sourceAsMap()); } builder.startObject("settings"); @@ -337,141 +328,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - private static List parseAliases(XContentParser parser) throws IOException { - List indexAliases = new ArrayList<>(); - // We start at START_OBJECT since parseIndexEntry ensures that - while (parser.nextToken() != Token.END_OBJECT) { - ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser); - indexAliases.add(AliasMetadata.Builder.fromXContent(parser)); - } - return indexAliases; - } - - private static ImmutableOpenMap parseMappings(XContentParser parser) throws IOException { - ImmutableOpenMap.Builder indexMappings = ImmutableOpenMap.builder(); - // We start at START_OBJECT since parseIndexEntry ensures that - while (parser.nextToken() != Token.END_OBJECT) { - ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser); - parser.nextToken(); - if (parser.currentToken() == Token.START_OBJECT) { - String mappingType = parser.currentName(); - 
indexMappings.put(mappingType, new MappingMetadata(mappingType, parser.map())); - } else if (parser.currentToken() == Token.START_ARRAY) { - parser.skipChildren(); - } - } - return indexMappings.build(); - } - - private static IndexEntry parseIndexEntry(XContentParser parser) throws IOException { - List indexAliases = null; - ImmutableOpenMap indexMappings = null; - Settings indexSettings = null; - Settings indexDefaultSettings = null; - String dataStream = null; - // We start at START_OBJECT since fromXContent ensures that - while (parser.nextToken() != Token.END_OBJECT) { - ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser); - parser.nextToken(); - if (parser.currentToken() == Token.START_OBJECT) { - switch (parser.currentName()) { - case "aliases": - indexAliases = parseAliases(parser); - break; - case "mappings": - indexMappings = parseMappings(parser); - break; - case "settings": - indexSettings = Settings.fromXContent(parser); - break; - case "defaults": - indexDefaultSettings = Settings.fromXContent(parser); - break; - default: - parser.skipChildren(); - } - } else if (parser.currentToken() == Token.VALUE_STRING) { - if (parser.currentName().equals("data_stream")) { - dataStream = parser.text(); - } - parser.skipChildren(); - } else if (parser.currentToken() == Token.START_ARRAY) { - parser.skipChildren(); - } - } - return new IndexEntry(indexAliases, indexMappings, indexSettings, indexDefaultSettings, dataStream); - } - - // This is just an internal container to make stuff easier for returning - private static class IndexEntry { - List indexAliases = new ArrayList<>(); - ImmutableOpenMap indexMappings = ImmutableOpenMap.of(); - Settings indexSettings = Settings.EMPTY; - Settings indexDefaultSettings = Settings.EMPTY; - String dataStream; - - IndexEntry( - List indexAliases, - ImmutableOpenMap indexMappings, - Settings indexSettings, - Settings indexDefaultSettings, - String dataStream - ) { - if (indexAliases != null) this.indexAliases = indexAliases; - if (indexMappings != null) this.indexMappings = indexMappings; - if (indexSettings != null) this.indexSettings = indexSettings; - if (indexDefaultSettings != null) this.indexDefaultSettings = indexDefaultSettings; - if (dataStream != null) this.dataStream = dataStream; - } - } - - public static GetIndexResponse fromXContent(XContentParser parser) throws IOException { - ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder settings = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder defaultSettings = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder dataStreams = ImmutableOpenMap.builder(); - List indices = new ArrayList<>(); - - if (parser.currentToken() == null) { - parser.nextToken(); - } - ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser); - parser.nextToken(); - - while (!parser.isClosed()) { - if (parser.currentToken() == Token.START_OBJECT) { - // we assume this is an index entry - String indexName = parser.currentName(); - indices.add(indexName); - IndexEntry indexEntry = parseIndexEntry(parser); - // make the order deterministic - CollectionUtil.timSort(indexEntry.indexAliases, Comparator.comparing(AliasMetadata::alias)); - aliases.put(indexName, Collections.unmodifiableList(indexEntry.indexAliases)); - mappings.put(indexName, indexEntry.indexMappings); - settings.put(indexName, indexEntry.indexSettings); - if (indexEntry.indexDefaultSettings.isEmpty() == false) { - 
defaultSettings.put(indexName, indexEntry.indexDefaultSettings); - } - if (indexEntry.dataStream != null) { - dataStreams.put(indexName, indexEntry.dataStream); - } - } else if (parser.currentToken() == Token.START_ARRAY) { - parser.skipChildren(); - } else { - parser.nextToken(); - } - } - return new GetIndexResponse( - indices.toArray(new String[0]), - mappings.build(), - aliases.build(), - settings.build(), - defaultSettings.build(), - dataStreams.build() - ); - } - @Override public String toString() { return Strings.toString(this); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/get/TransportGetIndexAction.java b/server/src/main/java/org/opensearch/action/admin/indices/get/TransportGetIndexAction.java index 872cc66f8c1ba..0cd3214307359 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/get/TransportGetIndexAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/get/TransportGetIndexAction.java @@ -102,7 +102,7 @@ protected void doMasterOperation( final ClusterState state, final ActionListener listener ) { - ImmutableOpenMap> mappingsResult = ImmutableOpenMap.of(); + ImmutableOpenMap mappingsResult = ImmutableOpenMap.of(); ImmutableOpenMap> aliasesResult = ImmutableOpenMap.of(); ImmutableOpenMap settings = ImmutableOpenMap.of(); ImmutableOpenMap defaultSettings = ImmutableOpenMap.of(); @@ -121,8 +121,7 @@ protected void doMasterOperation( case MAPPINGS: if (!doneMappings) { try { - mappingsResult = state.metadata() - .findMappings(concreteIndices, request.types(), indicesService.getFieldFilter()); + mappingsResult = state.metadata().findMappings(concreteIndices, indicesService.getFieldFilter()); doneMappings = true; } catch (IOException e) { listener.onFailure(e); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index d486a102d1a21..713c842e07dad 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -47,7 +47,6 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.Mapper; -import org.opensearch.rest.BaseRestHandler; import java.io.IOException; import java.io.InputStream; @@ -58,8 +57,6 @@ import static java.util.Collections.unmodifiableMap; import static org.opensearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.opensearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; /** * Response object for {@link GetFieldMappingsRequest} API @@ -100,6 +97,7 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte }, MAPPINGS, ObjectParser.ValueType.OBJECT); } + // todo remove middle `type` level private final Map>> mappings; GetFieldMappingsResponse(Map>> mappings) { @@ -154,28 +152,18 @@ public FieldMappingMetadata fieldMappings(String index, String type, String fiel @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - boolean includeTypeName = params.paramAsBoolean(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, 
DEFAULT_INCLUDE_TYPE_NAME_POLICY); - builder.startObject(); for (Map.Entry>> indexEntry : mappings.entrySet()) { builder.startObject(indexEntry.getKey()); builder.startObject(MAPPINGS.getPreferredName()); - if (includeTypeName == false) { - Map mappings = null; - for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { - assert mappings == null; - mappings = typeEntry.getValue(); - } - if (mappings != null) { - addFieldMappingsToBuilder(builder, params, mappings); - } - } else { - for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { - builder.startObject(typeEntry.getKey()); - addFieldMappingsToBuilder(builder, params, typeEntry.getValue()); - builder.endObject(); - } + Map mappings = null; + for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { + assert mappings == null; + mappings = typeEntry.getValue(); + } + if (mappings != null) { + addFieldMappingsToBuilder(builder, params, mappings); } builder.endObject(); @@ -194,24 +182,6 @@ private void addFieldMappingsToBuilder(XContentBuilder builder, Params params, M } } - public static GetFieldMappingsResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - - final Map>> mappings = new HashMap<>(); - if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { - while (parser.currentToken() == XContentParser.Token.FIELD_NAME) { - final String index = parser.currentName(); - - final Map> typeMappings = PARSER.parse(parser, index); - mappings.put(index, typeMappings); - - parser.nextToken(); - } - } - - return new GetFieldMappingsResponse(mappings); - } - public static class FieldMappingMetadata implements ToXContentFragment { public static final FieldMappingMetadata NULL = new FieldMappingMetadata("", BytesArray.EMPTY); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java index 0087271147f4a..d203a5e6a45fe 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java @@ -33,6 +33,7 @@ package org.opensearch.action.admin.indices.mapping.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.ParseField; @@ -42,119 +43,82 @@ import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.xcontent.ToXContentFragment; import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.rest.BaseRestHandler; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; -import java.util.Map; - -import static org.opensearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; public class GetMappingsResponse extends ActionResponse implements ToXContentFragment { private static final ParseField MAPPINGS = new ParseField("mappings"); - private ImmutableOpenMap> mappings = ImmutableOpenMap.of(); + private final ImmutableOpenMap mappings; - public GetMappingsResponse(ImmutableOpenMap> mappings) { + public GetMappingsResponse(ImmutableOpenMap mappings) { this.mappings = mappings; } GetMappingsResponse(StreamInput in) throws IOException { super(in); int size = in.readVInt(); - 
ImmutableOpenMap.Builder> indexMapBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder indexMapBuilder = ImmutableOpenMap.builder(); for (int i = 0; i < size; i++) { - String key = in.readString(); - int valueSize = in.readVInt(); - ImmutableOpenMap.Builder typeMapBuilder = ImmutableOpenMap.builder(); - for (int j = 0; j < valueSize; j++) { - typeMapBuilder.put(in.readString(), new MappingMetadata(in)); + String index = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + int mappingCount = in.readVInt(); + if (mappingCount == 0) { + indexMapBuilder.put(index, MappingMetadata.EMPTY_MAPPINGS); + } else if (mappingCount == 1) { + String type = in.readString(); + if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) { + throw new IllegalStateException("Expected " + MapperService.SINGLE_MAPPING_NAME + " but got [" + type + "]"); + } + indexMapBuilder.put(index, new MappingMetadata(in)); + } else { + throw new IllegalStateException("Expected 0 or 1 mappings but got: " + mappingCount); + } + } else { + boolean hasMapping = in.readBoolean(); + indexMapBuilder.put(index, hasMapping ? new MappingMetadata(in) : MappingMetadata.EMPTY_MAPPINGS); } - indexMapBuilder.put(key, typeMapBuilder.build()); } mappings = indexMapBuilder.build(); } - public ImmutableOpenMap> mappings() { + public ImmutableOpenMap mappings() { return mappings; } - public ImmutableOpenMap> getMappings() { + public ImmutableOpenMap getMappings() { return mappings(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(mappings.size()); - for (ObjectObjectCursor> indexEntry : mappings) { + for (ObjectObjectCursor indexEntry : mappings) { out.writeString(indexEntry.key); - out.writeVInt(indexEntry.value.size()); - for (ObjectObjectCursor typeEntry : indexEntry.value) { - out.writeString(typeEntry.key); - typeEntry.value.writeTo(out); - } - } - } - - public static GetMappingsResponse fromXContent(XContentParser parser) throws IOException { - if (parser.currentToken() == null) { - parser.nextToken(); - } - assert parser.currentToken() == XContentParser.Token.START_OBJECT; - Map parts = parser.map(); - - ImmutableOpenMap.Builder> builder = new ImmutableOpenMap.Builder<>(); - for (Map.Entry entry : parts.entrySet()) { - final String indexName = entry.getKey(); - assert entry.getValue() instanceof Map : "expected a map as type mapping, but got: " + entry.getValue().getClass(); - final Map mapping = (Map) ((Map) entry.getValue()).get(MAPPINGS.getPreferredName()); - - ImmutableOpenMap.Builder typeBuilder = new ImmutableOpenMap.Builder<>(); - for (Map.Entry typeEntry : mapping.entrySet()) { - final String typeName = typeEntry.getKey(); - assert typeEntry.getValue() instanceof Map : "expected a map as inner type mapping, but got: " - + typeEntry.getValue().getClass(); - final Map fieldMappings = (Map) typeEntry.getValue(); - MappingMetadata mmd = new MappingMetadata(typeName, fieldMappings); - typeBuilder.put(typeName, mmd); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeVInt(indexEntry.value == MappingMetadata.EMPTY_MAPPINGS ? 
0 : 1); + if (indexEntry.value != MappingMetadata.EMPTY_MAPPINGS) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + indexEntry.value.writeTo(out); + } + } else { + out.writeOptionalWriteable(indexEntry.value); } - builder.put(indexName, typeBuilder.build()); } - - return new GetMappingsResponse(builder.build()); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - boolean includeTypeName = params.paramAsBoolean(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - - for (final ObjectObjectCursor> indexEntry : getMappings()) { - builder.startObject(indexEntry.key); - { - if (includeTypeName == false) { - MappingMetadata mappings = null; - for (final ObjectObjectCursor typeEntry : indexEntry.value) { - assert mappings == null; - mappings = typeEntry.value; - } - if (mappings == null) { - // no mappings yet - builder.startObject(MAPPINGS.getPreferredName()).endObject(); - } else { - builder.field(MAPPINGS.getPreferredName(), mappings.sourceAsMap()); - } - } else { - builder.startObject(MAPPINGS.getPreferredName()); - { - for (final ObjectObjectCursor typeEntry : indexEntry.value) { - builder.field(typeEntry.key, typeEntry.value.sourceAsMap()); - } - } - builder.endObject(); - } + for (final ObjectObjectCursor indexEntry : getMappings()) { + if (indexEntry.value != null) { + builder.startObject(indexEntry.key); + builder.field(MAPPINGS.getPreferredName(), indexEntry.value.sourceAsMap()); + builder.endObject(); + } else { + builder.startObject(MAPPINGS.getPreferredName()).endObject(); } - builder.endObject(); } return builder; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java index 007550f73f205..3f6cb8ed35af9 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java @@ -91,8 +91,8 @@ protected void doMasterOperation( ) { logger.trace("serving getMapping request based on version {}", state.version()); try { - ImmutableOpenMap> result = state.metadata() - .findMappings(concreteIndices, request.types(), indicesService.getFieldFilter()); + ImmutableOpenMap result = state.metadata() + .findMappings(concreteIndices, indicesService.getFieldFilter()); listener.onResponse(new GetMappingsResponse(result)); } catch (IOException e) { listener.onFailure(e); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java index 2237ac573570a..27081048fcdae 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java @@ -39,29 +39,13 @@ */ public class PutMappingClusterStateUpdateRequest extends IndicesClusterStateUpdateRequest { - private String type; - private String source; - public PutMappingClusterStateUpdateRequest() { - - } - - public String type() { - return type; - } - - public PutMappingClusterStateUpdateRequest type(String type) { - this.type = type; - return this; + public PutMappingClusterStateUpdateRequest(String source) { + this.source 
= source; } public String source() { return source; } - - public PutMappingClusterStateUpdateRequest source(String source) { - this.source = source; - return this; - } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java index d8b3b781b6787..52be45054ba55 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -35,6 +35,7 @@ import com.carrotsearch.hppc.ObjectHashSet; import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchGenerationException; +import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.IndicesRequest; import org.opensearch.action.support.IndicesOptions; @@ -52,6 +53,7 @@ import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.Index; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; import java.io.InputStream; @@ -63,7 +65,7 @@ import static org.opensearch.action.ValidateActions.addValidationError; /** - * Puts mapping definition registered under a specific type into one or more indices. Best created with + * Puts mapping definition into one or more indices. Best created with * {@link org.opensearch.client.Requests#putMappingRequest(String...)}. *
<p>
        * If the mappings already exists, the new mappings will be merged with the new one. If there are elements @@ -95,8 +97,6 @@ public class PutMappingRequest extends AcknowledgedRequest im private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true); - private String type; - private String source; private String origin = ""; @@ -108,7 +108,12 @@ public PutMappingRequest(StreamInput in) throws IOException { super(in); indices = in.readStringArray(); indicesOptions = IndicesOptions.readIndicesOptions(in); - type = in.readOptionalString(); + if (in.getVersion().before(Version.V_2_0_0)) { + String type = in.readOptionalString(); + if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) { + throw new IllegalArgumentException("Expected type [_doc] but received [" + type + "]"); + } + } source = in.readString(); if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { in.readBoolean(); // updateAllTypes @@ -133,11 +138,6 @@ public PutMappingRequest(String... indices) { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; - if (type == null) { - validationException = addValidationError("mapping type is missing", validationException); - } else if (type.isEmpty()) { - validationException = addValidationError("mapping type is empty", validationException); - } if (source == null) { validationException = addValidationError("mapping source is missing", validationException); } else if (source.isEmpty()) { @@ -203,21 +203,6 @@ public boolean includeDataStreams() { return true; } - /** - * The mapping type. - */ - public String type() { - return type; - } - - /** - * The type of the mappings. - */ - public PutMappingRequest type(String type) { - this.type = type; - return this; - } - /** * The mapping source definition. */ @@ -233,7 +218,7 @@ public String source() { * mapping fields will automatically be put on the top level mapping object. */ public PutMappingRequest source(Object... source) { - return source(buildFromSimplifiedDef(type, source)); + return source(buildFromSimplifiedDef(source)); } public String origin() { @@ -247,8 +232,6 @@ public PutMappingRequest origin(String origin) { } /** - * @param type - * the mapping type * @param source * consisting of field/properties pairs (e.g. "field1", * "type=string,store=true") @@ -256,16 +239,13 @@ public PutMappingRequest origin(String origin) { * if the number of the source arguments is not divisible by two * @return the mappings definition */ - public static XContentBuilder buildFromSimplifiedDef(String type, Object... source) { + public static XContentBuilder buildFromSimplifiedDef(Object... source) { if (source.length % 2 != 0) { throw new IllegalArgumentException("mapping source must be pairs of fieldnames and properties definition."); } try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); - if (type != null) { - builder.startObject(type); - } for (int i = 0; i < source.length; i++) { String fieldName = source[i++].toString(); @@ -302,9 +282,6 @@ public static XContentBuilder buildFromSimplifiedDef(String type, Object... 
sour builder.endObject(); } builder.endObject(); - if (type != null) { - builder.endObject(); - } builder.endObject(); return builder; } catch (Exception e) { @@ -366,7 +343,9 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeStringArrayNullable(indices); indicesOptions.writeIndicesOptions(out); - out.writeOptionalString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeOptionalString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(source); if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { out.writeBoolean(true); // updateAllTypes diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java index fcf35891df872..a1b3b40d4e961 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java @@ -74,14 +74,6 @@ public PutMappingRequestBuilder setIndicesOptions(IndicesOptions options) { return this; } - /** - * The type of the mappings. - */ - public PutMappingRequestBuilder setType(String type) { - request.type(type); - return this; - } - /** * The mapping source definition. */ diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/TransportPutMappingAction.java index 6c580ec8aa22e..f1093a15a3d26 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/TransportPutMappingAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/TransportPutMappingAction.java @@ -132,10 +132,7 @@ protected void masterOperation( } performMappingUpdate(concreteIndices, request, listener, metadataMappingService); } catch (IndexNotFoundException ex) { - logger.debug( - () -> new ParameterizedMessage("failed to put mappings on indices [{}], type [{}]", request.indices(), request.type()), - ex - ); + logger.debug(() -> new ParameterizedMessage("failed to put mappings on indices [{}]", Arrays.asList(request.indices())), ex); throw ex; } } @@ -170,11 +167,9 @@ static void performMappingUpdate( ActionListener listener, MetadataMappingService metadataMappingService ) { - PutMappingClusterStateUpdateRequest updateRequest = new PutMappingClusterStateUpdateRequest().ackTimeout(request.timeout()) - .masterNodeTimeout(request.masterNodeTimeout()) - .indices(concreteIndices) - .type(request.type()) - .source(request.source()); + PutMappingClusterStateUpdateRequest updateRequest = new PutMappingClusterStateUpdateRequest(request.source()).indices( + concreteIndices + ).ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()); metadataMappingService.putMapping(updateRequest, new ActionListener() { diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java index 94028f315a704..f06cb599a60df 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java @@ -43,8 +43,6 @@ import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import 
org.opensearch.common.xcontent.ObjectParser; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.mapper.MapperService; @@ -60,9 +58,9 @@ * Note: there is a new class with the same name for the Java HLRC that uses a typeless format. * Any changes done to this class should also go to that client class. */ -public class RolloverRequest extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { +public class RolloverRequest extends AcknowledgedRequest implements IndicesRequest { - private static final ObjectParser PARSER = new ObjectParser<>("rollover"); + private static final ObjectParser PARSER = new ObjectParser<>("rollover"); private static final ObjectParser>, Void> CONDITION_PARSER = new ObjectParser<>("conditions"); private static final ParseField CONDITIONS = new ParseField("conditions"); @@ -97,24 +95,13 @@ public class RolloverRequest extends AcknowledgedRequest implem CreateIndexRequest.SETTINGS, ObjectParser.ValueType.OBJECT ); - PARSER.declareField((parser, request, includeTypeName) -> { - if (includeTypeName) { - for (Map.Entry mappingsEntry : parser.map().entrySet()) { - request.createIndexRequest.mapping(mappingsEntry.getKey(), (Map) mappingsEntry.getValue()); - } - } else { - // a type is not included, add a dummy _doc type - Map mappings = parser.map(); - if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { - throw new IllegalArgumentException( - "The mapping definition cannot be nested under a type " - + "[" - + MapperService.SINGLE_MAPPING_NAME - + "] unless include_type_name is set to true." - ); - } - request.createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappings); + PARSER.declareField((parser, request, context) -> { + // a type is not included, add a dummy _doc type + Map mappings = parser.map(); + if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { + throw new IllegalArgumentException("The mapping definition cannot be nested under a type"); } + request.createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappings); }, CreateIndexRequest.MAPPINGS, ObjectParser.ValueType.OBJECT); PARSER.declareField( (parser, request, context) -> request.createIndexRequest.aliases(parser.map()), @@ -273,23 +260,8 @@ public CreateIndexRequest getCreateIndexRequest() { return createIndexRequest; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - createIndexRequest.innerToXContent(builder, params); - - builder.startObject(CONDITIONS.getPreferredName()); - for (Condition condition : conditions.values()) { - condition.toXContent(builder, params); - } - builder.endObject(); - - builder.endObject(); - return builder; - } - // param isTypeIncluded decides how mappings should be parsed from XContent - public void fromXContent(boolean isTypeIncluded, XContentParser parser) throws IOException { - PARSER.parse(parser, this, isTypeIncluded); + public void fromXContent(XContentParser parser) throws IOException { + PARSER.parse(parser, this, null); } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java index 5a596b090133f..e6d487e0a40b3 100644 --- 
a/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java @@ -38,15 +38,13 @@ import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.ToXContentObject; import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import static java.util.Collections.singletonMap; -import static org.opensearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; public class GetIndexTemplatesResponse extends ActionResponse implements ToXContentObject { @@ -57,7 +55,7 @@ public GetIndexTemplatesResponse(StreamInput in) throws IOException { int size = in.readVInt(); indexTemplates = new ArrayList<>(); for (int i = 0; i < size; i++) { - indexTemplates.add(0, IndexTemplateMetadata.readFrom(in)); + indexTemplates.add(IndexTemplateMetadata.readFrom(in)); } } @@ -77,32 +75,28 @@ public void writeTo(StreamOutput out) throws IOException { } } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetIndexTemplatesResponse that = (GetIndexTemplatesResponse) o; + return Objects.equals(indexTemplates, that.indexTemplates); + } + + @Override + public int hashCode() { + return Objects.hash(indexTemplates); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { params = new ToXContent.DelegatingMapParams(singletonMap("reduce_mappings", "true"), params); - boolean includeTypeName = params.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - builder.startObject(); for (IndexTemplateMetadata indexTemplateMetadata : getIndexTemplates()) { - if (includeTypeName) { - IndexTemplateMetadata.Builder.toXContentWithTypes(indexTemplateMetadata, builder, params); - } else { - IndexTemplateMetadata.Builder.toXContent(indexTemplateMetadata, builder, params); - } + IndexTemplateMetadata.Builder.toXContent(indexTemplateMetadata, builder, params); } builder.endObject(); return builder; } - - public static GetIndexTemplatesResponse fromXContent(XContentParser parser) throws IOException { - final List templates = new ArrayList<>(); - for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) { - if (token == XContentParser.Token.FIELD_NAME) { - final IndexTemplateMetadata templateMetadata = IndexTemplateMetadata.Builder.fromXContent(parser, parser.currentName()); - templates.add(templateMetadata); - } - } - return new GetIndexTemplatesResponse(templates); - } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index d331a1f9a559e..2ea2e492ffe4d 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -304,7 +304,7 @@ public PutIndexTemplateRequest mapping(String type, Map source) * ("field1", "type=string,store=true"). 
*/ public PutIndexTemplateRequest mapping(String type, Object... source) { - mapping(type, PutMappingRequest.buildFromSimplifiedDef(type, source)); + mapping(type, PutMappingRequest.buildFromSimplifiedDef(source)); return this; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java index 2b3a55d7d5988..4d6525d002381 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java @@ -32,8 +32,8 @@ package org.opensearch.action.admin.indices.validate.query; +import org.opensearch.Version; import org.opensearch.action.support.broadcast.BroadcastShardRequest; -import org.opensearch.common.Strings; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.index.query.QueryBuilder; @@ -49,7 +49,6 @@ public class ShardValidateQueryRequest extends BroadcastShardRequest { private QueryBuilder query; - private String[] types = Strings.EMPTY_ARRAY; private boolean explain; private boolean rewrite; private long nowInMillis; @@ -58,12 +57,12 @@ public class ShardValidateQueryRequest extends BroadcastShardRequest { public ShardValidateQueryRequest(StreamInput in) throws IOException { super(in); query = in.readNamedWriteable(QueryBuilder.class); - - int typesSize = in.readVInt(); - if (typesSize > 0) { - types = new String[typesSize]; - for (int i = 0; i < typesSize; i++) { - types[i] = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + int typesSize = in.readVInt(); + if (typesSize > 0) { + for (int i = 0; i < typesSize; i++) { + in.readString(); + } } } filteringAliases = new AliasFilter(in); @@ -75,7 +74,6 @@ public ShardValidateQueryRequest(StreamInput in) throws IOException { public ShardValidateQueryRequest(ShardId shardId, AliasFilter filteringAliases, ValidateQueryRequest request) { super(shardId, request); this.query = request.query(); - this.types = request.types(); this.explain = request.explain(); this.rewrite = request.rewrite(); this.filteringAliases = Objects.requireNonNull(filteringAliases, "filteringAliases must not be null"); @@ -86,10 +84,6 @@ public QueryBuilder query() { return query; } - public String[] types() { - return this.types; - } - public boolean explain() { return this.explain; } @@ -110,9 +104,8 @@ public long nowInMillis() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeNamedWriteable(query); - out.writeVInt(types.length); - for (String type : types) { - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeVInt(0); // no types to filter } filteringAliases.writeTo(out); out.writeBoolean(explain); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequest.java index 81c42ded2ce39..1bb85c4e84483 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -32,6 +32,7 @@ package org.opensearch.action.admin.indices.validate.query; +import org.opensearch.Version; import 
org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.ValidateActions; import org.opensearch.action.support.IndicesOptions; @@ -60,8 +61,6 @@ public class ValidateQueryRequest extends BroadcastRequest private boolean rewrite; private boolean allShards; - private String[] types = Strings.EMPTY_ARRAY; - long nowInMillis; public ValidateQueryRequest() { @@ -71,11 +70,12 @@ public ValidateQueryRequest() { public ValidateQueryRequest(StreamInput in) throws IOException { super(in); query = in.readNamedWriteable(QueryBuilder.class); - int typesSize = in.readVInt(); - if (typesSize > 0) { - types = new String[typesSize]; - for (int i = 0; i < typesSize; i++) { - types[i] = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + int typesSize = in.readVInt(); + if (typesSize > 0) { + for (int i = 0; i < typesSize; i++) { + in.readString(); + } } } explain = in.readBoolean(); @@ -113,29 +113,6 @@ public ValidateQueryRequest query(QueryBuilder query) { return this; } - /** - * The types of documents the query will run against. Defaults to all types. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public String[] types() { - return this.types; - } - - /** - * The types of documents the query will run against. Defaults to all types. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public ValidateQueryRequest types(String... types) { - this.types = types; - return this; - } - /** * Indicate if detailed information about query is requested */ @@ -182,9 +159,8 @@ public boolean allShards() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeNamedWriteable(query); - out.writeVInt(types.length); - for (String type : types) { - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeVInt(0); // no types to filter } out.writeBoolean(explain); out.writeBoolean(rewrite); @@ -196,8 +172,7 @@ public String toString() { return "[" + Arrays.toString(indices) + "]" - + Arrays.toString(types) - + ", query[" + + " query[" + query + "], explain:" + explain diff --git a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java index de4f619804b20..88261e6536240 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java @@ -45,14 +45,6 @@ public ValidateQueryRequestBuilder(OpenSearchClient client, ValidateQueryAction super(client, action, new ValidateQueryRequest()); } - /** - * The types of documents the query will run against. Defaults to all types. - */ - public ValidateQueryRequestBuilder setTypes(String... types) { - request.types(types); - return this; - } - /** * The query to validate. 
* diff --git a/server/src/main/java/org/opensearch/action/bulk/MappingUpdatePerformer.java b/server/src/main/java/org/opensearch/action/bulk/MappingUpdatePerformer.java index aa24c19bb3e95..c0eb29e4c112f 100644 --- a/server/src/main/java/org/opensearch/action/bulk/MappingUpdatePerformer.java +++ b/server/src/main/java/org/opensearch/action/bulk/MappingUpdatePerformer.java @@ -41,6 +41,6 @@ public interface MappingUpdatePerformer { /** * Update the mappings on the master. */ - void updateMappings(Mapping update, ShardId shardId, String type, ActionListener listener); + void updateMappings(Mapping update, ShardId shardId, ActionListener listener); } diff --git a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java index ed407bd37d684..f3ab9673a0201 100644 --- a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java @@ -162,10 +162,10 @@ protected void dispatchedShardOperationOnPrimary( ActionListener> listener ) { ClusterStateObserver observer = new ClusterStateObserver(clusterService, request.timeout(), logger, threadPool.getThreadContext()); - performOnPrimary(request, primary, updateHelper, threadPool::absoluteTimeInMillis, (update, shardId, type, mappingListener) -> { + performOnPrimary(request, primary, updateHelper, threadPool::absoluteTimeInMillis, (update, shardId, mappingListener) -> { assert update != null; assert shardId != null; - mappingUpdatedAction.updateMappingOnMaster(shardId.getIndex(), type, update, mappingListener); + mappingUpdatedAction.updateMappingOnMaster(shardId.getIndex(), update, mappingListener); }, mappingUpdateListener -> observer.waitForNextChange(new ClusterStateObserver.Listener() { @Override public void onNewClusterState(ClusterState state) { @@ -371,7 +371,7 @@ static boolean executeBulkItemRequest( primary.mapperService() .merge( MapperService.SINGLE_MAPPING_NAME, - new CompressedXContent(result.getRequiredMappingUpdate(), XContentType.JSON, ToXContent.EMPTY_PARAMS), + new CompressedXContent(result.getRequiredMappingUpdate(), ToXContent.EMPTY_PARAMS), MapperService.MergeReason.MAPPING_UPDATE_PREFLIGHT ); } catch (Exception e) { @@ -380,37 +380,32 @@ static boolean executeBulkItemRequest( return true; } - mappingUpdater.updateMappings( - result.getRequiredMappingUpdate(), - primary.shardId(), - MapperService.SINGLE_MAPPING_NAME, - new ActionListener() { - @Override - public void onResponse(Void v) { - context.markAsRequiringMappingUpdate(); - waitForMappingUpdate.accept(ActionListener.runAfter(new ActionListener() { - @Override - public void onResponse(Void v) { - assert context.requiresWaitingForMappingUpdate(); - context.resetForExecutionForRetry(); - } - - @Override - public void onFailure(Exception e) { - context.failOnMappingUpdate(e); - } - }, () -> itemDoneListener.onResponse(null))); - } + mappingUpdater.updateMappings(result.getRequiredMappingUpdate(), primary.shardId(), new ActionListener() { + @Override + public void onResponse(Void v) { + context.markAsRequiringMappingUpdate(); + waitForMappingUpdate.accept(ActionListener.runAfter(new ActionListener() { + @Override + public void onResponse(Void v) { + assert context.requiresWaitingForMappingUpdate(); + context.resetForExecutionForRetry(); + } - @Override - public void onFailure(Exception e) { - onComplete(exceptionToResult(e, primary, isDelete, version), context, updateResult); - // 
Requesting mapping update failed, so we don't have to wait for a cluster state update - assert context.isInitial(); - itemDoneListener.onResponse(null); - } + @Override + public void onFailure(Exception e) { + context.failOnMappingUpdate(e); + } + }, () -> itemDoneListener.onResponse(null))); } - ); + + @Override + public void onFailure(Exception e) { + onComplete(exceptionToResult(e, primary, isDelete, version), context, updateResult); + // Requesting mapping update failed, so we don't have to wait for a cluster state update + assert context.isInitial(); + itemDoneListener.onResponse(null); + } + }); return false; } else { onComplete(result, context, updateResult); diff --git a/server/src/main/java/org/opensearch/action/delete/DeleteRequestBuilder.java b/server/src/main/java/org/opensearch/action/delete/DeleteRequestBuilder.java index 28abf092ad72d..f6ee0f4a7b278 100644 --- a/server/src/main/java/org/opensearch/action/delete/DeleteRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/delete/DeleteRequestBuilder.java @@ -53,15 +53,6 @@ public DeleteRequestBuilder(OpenSearchClient client, DeleteAction action, @Nulla super(client, action, new DeleteRequest(index)); } - /** - * Sets the type of the document to delete. - * @deprecated types will be removed - */ - @Deprecated - public DeleteRequestBuilder setType(String type) { - return this; - } - /** * Sets the id of the document to delete. */ diff --git a/server/src/main/java/org/opensearch/action/index/IndexRequestBuilder.java b/server/src/main/java/org/opensearch/action/index/IndexRequestBuilder.java index f31efa3fc95d8..cef5ef0f85c62 100644 --- a/server/src/main/java/org/opensearch/action/index/IndexRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/index/IndexRequestBuilder.java @@ -59,15 +59,6 @@ public IndexRequestBuilder(OpenSearchClient client, IndexAction action, @Nullabl super(client, action, new IndexRequest(index)); } - /** - * Sets the type to index the document to. - * @deprecated types will be removed - */ - @Deprecated - public IndexRequestBuilder setType(String type) { - return this; - } - /** * Sets the id to index the document under. Optional, and if not set, one will be automatically * generated. 
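Editorial note: the two builder diffs above drop the deprecated `setType` setters, and the `Client` interface changes later in this patch expose typeless entry points such as `prepareIndex(String index)` and `prepareDelete(String index, String id)`. A minimal caller-side sketch of the resulting usage follows; it is illustrative only — the `client` variable, index name, document id, and source JSON are assumptions, not part of this patch (imports of `IndexResponse`, `DeleteResponse`, and `XContentType` are assumed).

```
// Hypothetical usage sketch after type removal; "my-index", the id "1" and the
// source document are made-up values, and `client` is an existing Client instance.
IndexResponse indexResponse = client.prepareIndex("my-index")
    .setId("1")
    .setSource("{\"field\":\"value\"}", XContentType.JSON)
    .get();

// Deleting now needs only the index and the document id.
DeleteResponse deleteResponse = client.prepareDelete("my-index", "1").get();
```

The same pattern applies to `prepareUpdate(String index, String id)` and the `prepareGet` family, which likewise lose their type parameter in this change.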
diff --git a/server/src/main/java/org/opensearch/action/support/master/info/ClusterInfoRequest.java b/server/src/main/java/org/opensearch/action/support/master/info/ClusterInfoRequest.java index 018464a5a0cd7..0b392caa3e588 100644 --- a/server/src/main/java/org/opensearch/action/support/master/info/ClusterInfoRequest.java +++ b/server/src/main/java/org/opensearch/action/support/master/info/ClusterInfoRequest.java @@ -32,6 +32,7 @@ package org.opensearch.action.support.master.info; +import org.opensearch.Version; import org.opensearch.action.IndicesRequest; import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.master.MasterNodeReadRequest; @@ -46,7 +47,6 @@ public abstract class ClusterInfoRequest SYSTEM_PROPERTIES; static { diff --git a/server/src/main/java/org/opensearch/bootstrap/SystemCallFilter.java b/server/src/main/java/org/opensearch/bootstrap/SystemCallFilter.java index 434af8fceb1dc..8e179de9c28df 100644 --- a/server/src/main/java/org/opensearch/bootstrap/SystemCallFilter.java +++ b/server/src/main/java/org/opensearch/bootstrap/SystemCallFilter.java @@ -227,7 +227,7 @@ static SockFilter BPF_JUMP(int code, int k, int jt, int jf) { static class Arch { /** AUDIT_ARCH_XXX constant from linux/audit.h */ final int audit; - /** syscall limit (necessary for blacklisting on amd64, to ban 32-bit syscalls) */ + /** syscall limit (necessary for denylisting on amd64, to ban 32-bit syscalls) */ final int limit; /** __NR_fork */ final int fork; diff --git a/server/src/main/java/org/opensearch/client/Client.java b/server/src/main/java/org/opensearch/client/Client.java index 0be8b4a1573d5..bca68834ca3cf 100644 --- a/server/src/main/java/org/opensearch/client/Client.java +++ b/server/src/main/java/org/opensearch/client/Client.java @@ -112,7 +112,7 @@ public interface Client extends OpenSearchClient, Releasable { AdminClient admin(); /** - * Index a JSON source associated with a given index and type. + * Index a JSON source associated with a given index. *
<p>
        * The id is optional, if it is not provided, one will be generated automatically. * @@ -123,7 +123,7 @@ public interface Client extends OpenSearchClient, Releasable { ActionFuture<IndexResponse> index(IndexRequest request); /** - * Index a document associated with a given index and type. + * Index a document associated with a given index. *
<p>
        * The id is optional, if it is not provided, one will be generated automatically. * @@ -134,12 +134,21 @@ public interface Client extends OpenSearchClient, Releasable { void index(IndexRequest request, ActionListener<IndexResponse> listener); /** - * Index a document associated with a given index and type. + * Index a document associated with a given index. *
<p>
        * The id is optional, if it is not provided, one will be generated automatically. */ IndexRequestBuilder prepareIndex(); + /** + * Index a document associated with a given index. + *
<p>
        + * The id is optional, if it is not provided, one will be generated automatically. + * + * @param index The index to index the document to + */ + IndexRequestBuilder prepareIndex(String index); + /** * Updates a document based on a script. * @@ -164,31 +173,10 @@ public interface Client extends OpenSearchClient, Releasable { /** * Updates a document based on a script. */ - UpdateRequestBuilder prepareUpdate(String index, String type, String id); + UpdateRequestBuilder prepareUpdate(String index, String id); /** - * Index a document associated with a given index and type. - *
<p>
        - * The id is optional, if it is not provided, one will be generated automatically. - * - * @param index The index to index the document to - * @param type The type to index the document to - */ - IndexRequestBuilder prepareIndex(String index, String type); - - /** - * Index a document associated with a given index and type. - *
<p>
        * The id is optional, if it is not provided, one will be generated automatically. - * - * @param index The index to index the document to - * @param type The type to index the document to - * @param id The id of the document - */ - IndexRequestBuilder prepareIndex(String index, String type, @Nullable String id); - - /** - * Deletes a document from the index based on the index, type and id. + * Deletes a document from the index based on the index and id. * * @param request The delete request * @return The result future @@ -197,7 +185,7 @@ public interface Client extends OpenSearchClient, Releasable { ActionFuture<DeleteResponse> delete(DeleteRequest request); /** - * Deletes a document from the index based on the index, type and id. + * Deletes a document from the index based on the index and id. * * @param request The delete request * @param listener A listener to be notified with a result @@ -206,18 +194,17 @@ public interface Client extends OpenSearchClient, Releasable { void delete(DeleteRequest request, ActionListener<DeleteResponse> listener); /** - * Deletes a document from the index based on the index, type and id. + * Deletes a document from the index based on the index and id. */ DeleteRequestBuilder prepareDelete(); /** - * Deletes a document from the index based on the index, type and id. + * Deletes a document from the index based on the index and id. * * @param index The index to delete the document from - * @param type The type of the document to delete * @param id The id of the document to delete */ - DeleteRequestBuilder prepareDelete(String index, String type, String id); + DeleteRequestBuilder prepareDelete(String index, String id); /** * Executes a bulk of index / delete operations. @@ -248,7 +235,7 @@ public interface Client extends OpenSearchClient, Releasable { BulkRequestBuilder prepareBulk(@Nullable String globalIndex); /** - * Gets the document that was indexed from an index with a type and id. + * Gets the document that was indexed from an index with an id. * * @param request The get request * @return The result future @@ -257,7 +244,7 @@ public interface Client extends OpenSearchClient, Releasable { ActionFuture<GetResponse> get(GetRequest request); /** - * Gets the document that was indexed from an index with a type and id. + * Gets the document that was indexed from an index with an id. * * @param request The get request * @param listener A listener to be notified with a result @@ -266,7 +253,7 @@ public interface Client extends OpenSearchClient, Releasable { void get(GetRequest request, ActionListener<GetResponse> listener); /** - * Gets the document that was indexed from an index with a type and id. + * Gets the document that was indexed from an index with an id. */ GetRequestBuilder prepareGet(); @@ -291,7 +278,7 @@ public interface Client extends OpenSearchClient, Releasable { MultiGetRequestBuilder prepareMultiGet(); /** - * Search across one or more indices and one or more types with a query. + * Search across one or more indices with a query. * * @param request The search request * @return The result future @@ -300,7 +287,7 @@ public interface Client extends OpenSearchClient, Releasable { ActionFuture<SearchResponse> search(SearchRequest request); /** - * Search across one or more indices and one or more types with a query. + * Search across one or more indices with a query.
* * @param request The search request * @param listener A listener to be notified of the result @@ -309,7 +296,7 @@ public interface Client extends OpenSearchClient, Releasable { void search(SearchRequest request, ActionListener listener); /** - * Search across one or more indices and one or more types with a query. + * Search across one or more indices with a query. */ SearchRequestBuilder prepareSearch(String... indices); diff --git a/server/src/main/java/org/opensearch/client/support/AbstractClient.java b/server/src/main/java/org/opensearch/client/support/AbstractClient.java index 4a5c19819613e..79ad5be58cb78 100644 --- a/server/src/main/java/org/opensearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/opensearch/client/support/AbstractClient.java @@ -449,13 +449,8 @@ public IndexRequestBuilder prepareIndex() { } @Override - public IndexRequestBuilder prepareIndex(String index, String type) { - return prepareIndex(index, type, null); - } - - @Override - public IndexRequestBuilder prepareIndex(String index, String type, @Nullable String id) { - return prepareIndex().setIndex(index).setType(type).setId(id); + public IndexRequestBuilder prepareIndex(String index) { + return new IndexRequestBuilder(this, IndexAction.INSTANCE, index); } @Override @@ -470,12 +465,12 @@ public void update(final UpdateRequest request, final ActionListener listener) { + public void updateMappingOnMaster(Index index, Mapping mappingUpdate, ActionListener listener) { final RunOnce release = new RunOnce(() -> semaphore.release()); try { @@ -121,7 +121,7 @@ public void updateMappingOnMaster(Index index, String type, Mapping mappingUpdat } boolean successFullySent = false; try { - sendUpdateMapping(index, type, mappingUpdate, ActionListener.runBefore(listener, release::run)); + sendUpdateMapping(index, mappingUpdate, ActionListener.runBefore(listener, release::run)); successFullySent = true; } finally { if (successFullySent == false) { @@ -136,10 +136,9 @@ int blockedThreads() { } // can be overridden by tests - protected void sendUpdateMapping(Index index, String type, Mapping mappingUpdate, ActionListener listener) { + protected void sendUpdateMapping(Index index, Mapping mappingUpdate, ActionListener listener) { PutMappingRequest putMappingRequest = new PutMappingRequest(); putMappingRequest.setConcreteIndex(index); - putMappingRequest.type(type); putMappingRequest.source(mappingUpdate.toString(), XContentType.JSON); putMappingRequest.masterNodeTimeout(dynamicMappingUpdateTimeout); putMappingRequest.timeout(TimeValue.ZERO); diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java index 02fe7ee8db889..66bca027d7cc4 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java @@ -36,15 +36,19 @@ import org.opensearch.OpenSearchParseException; import org.opensearch.cluster.AbstractDiffable; import org.opensearch.cluster.Diff; +import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContent; +import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; -import 
org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.DocumentMapper; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Collections; import java.util.Map; import static org.opensearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; @@ -53,6 +57,7 @@ * Mapping configuration for a type. */ public class MappingMetadata extends AbstractDiffable { + public static final MappingMetadata EMPTY_MAPPINGS = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, Collections.emptyMap()); public static class Routing { @@ -88,7 +93,7 @@ public int hashCode() { private final CompressedXContent source; - private Routing routing; + private final Routing routing; public MappingMetadata(DocumentMapper docMapper) { this.type = docMapper.type(); @@ -96,6 +101,7 @@ public MappingMetadata(DocumentMapper docMapper) { this.routing = new Routing(docMapper.routingFieldMapper().required()); } + @SuppressWarnings("unchecked") public MappingMetadata(CompressedXContent mapping) { this.source = mapping; Map mappingMap = XContentHelper.convertToMap(mapping.compressedReference(), true).v2(); @@ -103,20 +109,27 @@ public MappingMetadata(CompressedXContent mapping) { throw new IllegalStateException("Can't derive type from mapping, no root type: " + mapping.string()); } this.type = mappingMap.keySet().iterator().next(); - initMappers((Map) mappingMap.get(this.type)); + this.routing = initRouting((Map) mappingMap.get(this.type)); } - public MappingMetadata(String type, Map mapping) throws IOException { + @SuppressWarnings("unchecked") + public MappingMetadata(String type, Map mapping) { this.type = type; - this.source = new CompressedXContent((builder, params) -> builder.mapContents(mapping), XContentType.JSON, ToXContent.EMPTY_PARAMS); + try { + XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping); + this.source = new CompressedXContent(BytesReference.bytes(mappingBuilder)); + } catch (IOException e) { + throw new UncheckedIOException(e); // XContent exception, should never happen + } Map withoutType = mapping; if (mapping.size() == 1 && mapping.containsKey(type)) { withoutType = (Map) mapping.get(type); } - initMappers(withoutType); + this.routing = initRouting(withoutType); } - private void initMappers(Map withoutType) { + @SuppressWarnings("unchecked") + private Routing initRouting(Map withoutType) { if (withoutType.containsKey("_routing")) { boolean required = false; Map routingNode = (Map) withoutType.get("_routing"); @@ -134,9 +147,9 @@ private void initMappers(Map withoutType) { } } } - this.routing = new Routing(required); + return new Routing(required); } else { - this.routing = Routing.EMPTY; + return Routing.EMPTY; } } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java b/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java index 09e3bbe0cac32..b3503f64c53f3 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java @@ -454,44 +454,26 @@ public boolean hasAliases(final String[] aliases, String[] concreteIndices) { } /** - * Finds all mappings for types and concrete indices. Types are expanded to include all types that match the glob - * patterns in the types array. Empty types array, null or {"_all"} will be expanded to all types available for - * the given indices. 
Only fields that match the provided field filter will be returned (default is a predicate - * that always returns true, which can be overridden via plugins) + * Finds all mappings for concrete indices. Only fields that match the provided field + * filter will be returned (default is a predicate that always returns true, which can be + * overridden via plugins) * * @see MapperPlugin#getFieldFilter() * */ - public ImmutableOpenMap> findMappings( - String[] concreteIndices, - final String[] types, - Function> fieldFilter - ) throws IOException { - assert types != null; + public ImmutableOpenMap findMappings(String[] concreteIndices, Function> fieldFilter) + throws IOException { assert concreteIndices != null; if (concreteIndices.length == 0) { return ImmutableOpenMap.of(); } - boolean isAllTypes = isAllTypes(types); - ImmutableOpenMap.Builder> indexMapBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder indexMapBuilder = ImmutableOpenMap.builder(); Iterable intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys()); for (String index : intersection) { IndexMetadata indexMetadata = indices.get(index); Predicate fieldPredicate = fieldFilter.apply(index); - if (isAllTypes) { - indexMapBuilder.put(index, filterFields(indexMetadata.getMappings(), fieldPredicate)); - } else { - ImmutableOpenMap.Builder filteredMappings = ImmutableOpenMap.builder(); - for (ObjectObjectCursor cursor : indexMetadata.getMappings()) { - if (Regex.simpleMatch(types, cursor.key)) { - filteredMappings.put(cursor.key, filterFields(cursor.value, fieldPredicate)); - } - } - if (!filteredMappings.isEmpty()) { - indexMapBuilder.put(index, filteredMappings.build()); - } - } + indexMapBuilder.put(index, filterFields(indexMetadata.mapping(), fieldPredicate)); } return indexMapBuilder.build(); } @@ -514,22 +496,11 @@ public ImmutableOpenMap findDataStreams(Str return builder.build(); } - private static ImmutableOpenMap filterFields( - ImmutableOpenMap mappings, - Predicate fieldPredicate - ) throws IOException { - if (fieldPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { - return mappings; - } - ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(mappings.size()); - for (ObjectObjectCursor cursor : mappings) { - builder.put(cursor.key, filterFields(cursor.value, fieldPredicate)); - } - return builder.build(); // No types specified means return them all - } - @SuppressWarnings("unchecked") - private static MappingMetadata filterFields(MappingMetadata mappingMetadata, Predicate fieldPredicate) throws IOException { + private static MappingMetadata filterFields(MappingMetadata mappingMetadata, Predicate fieldPredicate) { + if (mappingMetadata == null) { + return MappingMetadata.EMPTY_MAPPINGS; + } if (fieldPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { return mappingMetadata; } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java index 7b135c9746652..69145bdee72b2 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java @@ -274,7 +274,7 @@ private ClusterState applyRequest( updateList.add(indexMetadata); // try and parse it (no need to add it here) so we can bail early in case of parsing exception DocumentMapper existingMapper = mapperService.documentMapper(); - DocumentMapper newMapper = mapperService.parse(request.type(), 
mappingUpdateSource); + DocumentMapper newMapper = mapperService.parse(MapperService.SINGLE_MAPPING_NAME, mappingUpdateSource); if (existingMapper != null) { // first, simulate: just call merge and ignore the result existingMapper.merge(newMapper.mapping(), MergeReason.MAPPING_UPDATE); @@ -294,7 +294,11 @@ private ClusterState applyRequest( if (existingMapper != null) { existingSource = existingMapper.mappingSource(); } - DocumentMapper mergedMapper = mapperService.merge(request.type(), mappingUpdateSource, MergeReason.MAPPING_UPDATE); + DocumentMapper mergedMapper = mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + mappingUpdateSource, + MergeReason.MAPPING_UPDATE + ); CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { @@ -343,11 +347,6 @@ private ClusterState applyRequest( return currentState; } } - - @Override - public String describeTasks(List tasks) { - return String.join(", ", tasks.stream().map(t -> (CharSequence) t.type())::iterator); - } } public void putMapping(final PutMappingClusterStateUpdateRequest request, final ActionListener listener) { diff --git a/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java b/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java index e9466b47c3d5b..9ed47ef6cbf39 100644 --- a/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java +++ b/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java @@ -34,9 +34,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; +import org.opensearch.common.concurrent.RefCountedReleasable; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.lease.Releasable; -import org.opensearch.common.util.concurrent.AbstractRefCounted; import org.opensearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -50,14 +50,14 @@ public final class ReleasableBytesReference implements Releasable, BytesReferenc public static final Releasable NO_OP = () -> {}; private final BytesReference delegate; - private final AbstractRefCounted refCounted; + private final RefCountedReleasable refCounted; public ReleasableBytesReference(BytesReference delegate, Releasable releasable) { this.delegate = delegate; - this.refCounted = new RefCountedReleasable(releasable); + this.refCounted = new RefCountedReleasable<>("bytes-reference", releasable, releasable::close); } - private ReleasableBytesReference(BytesReference delegate, AbstractRefCounted refCounted) { + private ReleasableBytesReference(BytesReference delegate, RefCountedReleasable refCounted) { this.delegate = delegate; this.refCounted = refCounted; refCounted.incRef(); @@ -82,7 +82,7 @@ public ReleasableBytesReference retainedSlice(int from, int length) { @Override public void close() { - refCounted.decRef(); + refCounted.close(); } @Override @@ -164,19 +164,4 @@ public boolean equals(Object obj) { public int hashCode() { return delegate.hashCode(); } - - private static final class RefCountedReleasable extends AbstractRefCounted { - - private final Releasable releasable; - - RefCountedReleasable(Releasable releasable) { - super("bytes-reference"); - this.releasable = releasable; - } - - @Override - protected void closeInternal() { - releasable.close(); - } - } } diff --git a/server/src/main/java/org/opensearch/common/compress/CompressedXContent.java b/server/src/main/java/org/opensearch/common/compress/CompressedXContent.java index e883b3739c9da..f15e213b9a773 
100644 --- a/server/src/main/java/org/opensearch/common/compress/CompressedXContent.java +++ b/server/src/main/java/org/opensearch/common/compress/CompressedXContent.java @@ -41,7 +41,6 @@ import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import java.io.IOException; import java.io.OutputStream; @@ -82,15 +81,19 @@ private CompressedXContent(byte[] compressed, int crc32) { /** * Create a {@link CompressedXContent} out of a {@link ToXContent} instance. */ - public CompressedXContent(ToXContent xcontent, XContentType type, ToXContent.Params params) throws IOException { + public CompressedXContent(ToXContent xcontent, ToXContent.Params params) throws IOException { BytesStreamOutput bStream = new BytesStreamOutput(); OutputStream compressedStream = CompressorFactory.COMPRESSOR.threadLocalOutputStream(bStream); CRC32 crc32 = new CRC32(); OutputStream checkedStream = new CheckedOutputStream(compressedStream, crc32); - try (XContentBuilder builder = XContentFactory.contentBuilder(type, checkedStream)) { - builder.startObject(); + try (XContentBuilder builder = XContentFactory.jsonBuilder(checkedStream)) { + if (xcontent.isFragment()) { + builder.startObject(); + } xcontent.toXContent(builder, params); - builder.endObject(); + if (xcontent.isFragment()) { + builder.endObject(); + } } this.bytes = BytesReference.toBytes(bStream.bytes()); this.crc32 = (int) crc32.getValue(); diff --git a/server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java b/server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java new file mode 100644 index 0000000000000..cb819c0320e91 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +/** + * Decorator class that wraps an object reference with a {@link Runnable} that is + * invoked when {@link #close()} is called. The internal {@link OneWayGate} instance ensures + * that this is invoked only once. See also {@link GatedCloseable} + */ +public class GatedAutoCloseable implements AutoCloseable { + + private final T ref; + private final Runnable onClose; + private final OneWayGate gate; + + public GatedAutoCloseable(T ref, Runnable onClose) { + this.ref = ref; + this.onClose = onClose; + gate = new OneWayGate(); + } + + public T get() { + return ref; + } + + @Override + public void close() { + if (gate.close()) { + onClose.run(); + } + } +} diff --git a/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java b/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java new file mode 100644 index 0000000000000..d98e4cca8d561 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java @@ -0,0 +1,48 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.common.concurrent; + +import org.opensearch.common.CheckedRunnable; + +import java.io.Closeable; +import java.io.IOException; + +/** + * Decorator class that wraps an object reference with a {@link CheckedRunnable} that is + * invoked when {@link #close()} is called. The internal {@link OneWayGate} instance ensures + * that this is invoked only once. See also {@link GatedAutoCloseable} + */ +public class GatedCloseable implements Closeable { + + private final T ref; + private final CheckedRunnable onClose; + private final OneWayGate gate; + + public GatedCloseable(T ref, CheckedRunnable onClose) { + this.ref = ref; + this.onClose = onClose; + gate = new OneWayGate(); + } + + public T get() { + return ref; + } + + @Override + public void close() throws IOException { + if (gate.close()) { + onClose.run(); + } + } +} diff --git a/server/src/main/java/org/opensearch/common/concurrent/OneWayGate.java b/server/src/main/java/org/opensearch/common/concurrent/OneWayGate.java new file mode 100644 index 0000000000000..76625094f3ca6 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/OneWayGate.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * Encapsulates logic for a one-way gate. Guarantees idempotency via the {@link AtomicBoolean} instance + * and the return value of the {@link #close()} function. + */ +public class OneWayGate { + + private final AtomicBoolean closed = new AtomicBoolean(); + + /** + * Closes the gate and sets the internal boolean value in an idempotent + * fashion. This is a one-way operation and cannot be reset. + * @return true if the gate was closed in this invocation, + * false if the gate was already closed + */ + public boolean close() { + return closed.compareAndSet(false, true); + } + + /** + * Indicates if the gate has been closed. + * @return true if the gate is closed, false otherwise + */ + public boolean isClosed() { + return closed.get(); + } +} diff --git a/server/src/main/java/org/opensearch/common/concurrent/RefCountedReleasable.java b/server/src/main/java/org/opensearch/common/concurrent/RefCountedReleasable.java new file mode 100644 index 0000000000000..975f2295d7c32 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/RefCountedReleasable.java @@ -0,0 +1,48 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import org.opensearch.common.lease.Releasable; +import org.opensearch.common.util.concurrent.AbstractRefCounted; + +/** + * Decorator class that wraps an object reference as a {@link AbstractRefCounted} instance. + * In addition to a {@link String} name, it accepts a {@link Runnable} shutdown hook that is + * invoked when the reference count reaches zero i.e. on {@link #closeInternal()}. 
+ */ +public class RefCountedReleasable extends AbstractRefCounted implements Releasable { + + private final T ref; + private final Runnable shutdownRunnable; + + public RefCountedReleasable(String name, T ref, Runnable shutdownRunnable) { + super(name); + this.ref = ref; + this.shutdownRunnable = shutdownRunnable; + } + + @Override + public void close() { + decRef(); + } + + public T get() { + return ref; + } + + @Override + protected void closeInternal() { + shutdownRunnable.run(); + } +} diff --git a/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java b/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java index 948e1a4e6eb37..671123f2df767 100644 --- a/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java +++ b/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java @@ -293,7 +293,7 @@ private boolean isOkayDuplicate(Binding original, BindingImpl binding) { return false; } - // It's unfortunate that we have to maintain a blacklist of specific + // It's unfortunate that we have to maintain a denylist of specific // classes, but we can't easily block the whole package because of // all our unit tests. private static final Set> FORBIDDEN_TYPES = unmodifiableSet( diff --git a/server/src/main/java/org/opensearch/common/inject/InheritingState.java b/server/src/main/java/org/opensearch/common/inject/InheritingState.java index 3d821114ff4b4..70a2fb335cca5 100644 --- a/server/src/main/java/org/opensearch/common/inject/InheritingState.java +++ b/server/src/main/java/org/opensearch/common/inject/InheritingState.java @@ -61,7 +61,7 @@ class InheritingState implements State { private final Map, Scope> scopes = new HashMap<>(); private final List converters = new ArrayList<>(); private final List listenerBindings = new ArrayList<>(); - private WeakKeySet blacklistedKeys = new WeakKeySet(); + private WeakKeySet denylistedKeys = new WeakKeySet(); private final Object lock; InheritingState(State parent) { @@ -145,17 +145,17 @@ public List getTypeListenerBindings() { @Override public void blacklist(Key key) { parent.blacklist(key); - blacklistedKeys.add(key); + denylistedKeys.add(key); } @Override public boolean isBlacklisted(Key key) { - return blacklistedKeys.contains(key); + return denylistedKeys.contains(key); } @Override public void clearBlacklisted() { - blacklistedKeys = new WeakKeySet(); + denylistedKeys = new WeakKeySet(); } @Override diff --git a/server/src/main/java/org/opensearch/common/inject/State.java b/server/src/main/java/org/opensearch/common/inject/State.java index 497c7d4d51e08..6a69e9547d707 100644 --- a/server/src/main/java/org/opensearch/common/inject/State.java +++ b/server/src/main/java/org/opensearch/common/inject/State.java @@ -164,7 +164,7 @@ public Object lock() { /** * Forbids the corresponding injector from creating a binding to {@code key}. Child injectors - * blacklist their bound keys on their parent injectors to prevent just-in-time bindings on the + * denylist their bound keys on their parent injectors to prevent just-in-time bindings on the * parent injector that would conflict. */ void blacklist(Key key); @@ -177,11 +177,11 @@ public Object lock() { /** * Returns the shared lock for all injector data. This is a low-granularity, high-contention lock - * to be used when reading mutable data (ie. just-in-time bindings, and binding blacklists). + * to be used when reading mutable data (ie. just-in-time bindings, and binding denylists). 
*/ Object lock(); - // ES_GUICE: clean blacklist keys + // ES_GUICE: clean denylist keys void clearBlacklisted(); void makeAllBindingsToEagerSingletons(Injector injector); diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/FieldValueFactorFunction.java b/server/src/main/java/org/opensearch/common/lucene/search/function/FieldValueFactorFunction.java index a015b24d73e5a..3233fc9f8cecc 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/FieldValueFactorFunction.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/FieldValueFactorFunction.java @@ -35,6 +35,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.opensearch.OpenSearchException; +import org.opensearch.common.Nullable; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; @@ -55,6 +56,8 @@ public class FieldValueFactorFunction extends ScoreFunction { private final String field; private final float boostFactor; private final Modifier modifier; + private final String functionName; + /** * Value used if the document is missing the field. */ @@ -67,6 +70,17 @@ public FieldValueFactorFunction( Modifier modifierType, Double missing, IndexNumericFieldData indexFieldData + ) { + this(field, boostFactor, modifierType, missing, indexFieldData, null); + } + + public FieldValueFactorFunction( + String field, + float boostFactor, + Modifier modifierType, + Double missing, + IndexNumericFieldData indexFieldData, + @Nullable String functionName ) { super(CombineFunction.MULTIPLY); this.field = field; @@ -74,6 +88,7 @@ public FieldValueFactorFunction( this.modifier = modifierType; this.indexFieldData = indexFieldData; this.missing = missing; + this.functionName = functionName; } @Override @@ -127,7 +142,7 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE (float) score, String.format( Locale.ROOT, - "field value function: %s(doc['%s'].value%s * factor=%s)", + "field value function" + Functions.nameOrEmptyFunc(functionName) + ": %s(doc['%s'].value%s * factor=%s)", modifierStr, field, defaultStr, diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java index 36ecf690862cc..f7b91db2e712f 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java @@ -46,6 +46,7 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.opensearch.OpenSearchException; +import org.opensearch.common.Nullable; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; @@ -70,11 +71,28 @@ public class FunctionScoreQuery extends Query { public static class FilterScoreFunction extends ScoreFunction { public final Query filter; public final ScoreFunction function; + public final String queryName; + /** + * Creates a FilterScoreFunction with query and function. + * @param filter filter query + * @param function score function + */ public FilterScoreFunction(Query filter, ScoreFunction function) { + this(filter, function, null); + } + + /** + * Creates a FilterScoreFunction with query and function. 
+ * @param filter filter query + * @param function score function + * @param queryName filter query name + */ + public FilterScoreFunction(Query filter, ScoreFunction function, @Nullable String queryName) { super(function.getDefaultScoreCombiner()); this.filter = filter; this.function = function; + this.queryName = queryName; } @Override @@ -93,12 +111,14 @@ protected boolean doEquals(ScoreFunction other) { return false; } FilterScoreFunction that = (FilterScoreFunction) other; - return Objects.equals(this.filter, that.filter) && Objects.equals(this.function, that.function); + return Objects.equals(this.filter, that.filter) + && Objects.equals(this.function, that.function) + && Objects.equals(this.queryName, that.queryName); } @Override protected int doHashCode() { - return Objects.hash(filter, function); + return Objects.hash(filter, function, queryName); } @Override @@ -107,7 +127,7 @@ protected ScoreFunction rewrite(IndexReader reader) throws IOException { if (newFilter == filter) { return this; } - return new FilterScoreFunction(newFilter, function); + return new FilterScoreFunction(newFilter, function, queryName); } @Override @@ -144,6 +164,7 @@ public static ScoreMode fromString(String scoreMode) { final float maxBoost; private final Float minScore; private final CombineFunction combineFunction; + private final String queryName; /** * Creates a FunctionScoreQuery without function. @@ -152,7 +173,18 @@ public static ScoreMode fromString(String scoreMode) { * @param maxBoost The maximum applicable boost. */ public FunctionScoreQuery(Query subQuery, Float minScore, float maxBoost) { - this(subQuery, ScoreMode.FIRST, new ScoreFunction[0], CombineFunction.MULTIPLY, minScore, maxBoost); + this(subQuery, null, minScore, maxBoost); + } + + /** + * Creates a FunctionScoreQuery without function. + * @param subQuery The query to match. + * @param queryName filter query name + * @param minScore The minimum score to consider a document. + * @param maxBoost The maximum applicable boost. + */ + public FunctionScoreQuery(Query subQuery, @Nullable String queryName, Float minScore, float maxBoost) { + this(subQuery, queryName, ScoreMode.FIRST, new ScoreFunction[0], CombineFunction.MULTIPLY, minScore, maxBoost); } /** @@ -161,7 +193,17 @@ public FunctionScoreQuery(Query subQuery, Float minScore, float maxBoost) { * @param function The {@link ScoreFunction} to apply. */ public FunctionScoreQuery(Query subQuery, ScoreFunction function) { - this(subQuery, function, CombineFunction.MULTIPLY, null, DEFAULT_MAX_BOOST); + this(subQuery, null, function); + } + + /** + * Creates a FunctionScoreQuery with a single {@link ScoreFunction} + * @param subQuery The query to match. + * @param queryName filter query name + * @param function The {@link ScoreFunction} to apply. + */ + public FunctionScoreQuery(Query subQuery, @Nullable String queryName, ScoreFunction function) { + this(subQuery, queryName, function, CombineFunction.MULTIPLY, null, DEFAULT_MAX_BOOST); } /** @@ -173,12 +215,53 @@ public FunctionScoreQuery(Query subQuery, ScoreFunction function) { * @param maxBoost The maximum applicable boost. */ public FunctionScoreQuery(Query subQuery, ScoreFunction function, CombineFunction combineFunction, Float minScore, float maxBoost) { - this(subQuery, ScoreMode.FIRST, new ScoreFunction[] { function }, combineFunction, minScore, maxBoost); + this(subQuery, null, function, combineFunction, minScore, maxBoost); + } + + /** + * Creates a FunctionScoreQuery with a single function + * @param subQuery The query to match. 
+ * @param queryName filter query name + * @param function The {@link ScoreFunction} to apply. + * @param combineFunction Defines how the query and function score should be applied. + * @param minScore The minimum score to consider a document. + * @param maxBoost The maximum applicable boost. + */ + public FunctionScoreQuery( + Query subQuery, + @Nullable String queryName, + ScoreFunction function, + CombineFunction combineFunction, + Float minScore, + float maxBoost + ) { + this(subQuery, queryName, ScoreMode.FIRST, new ScoreFunction[] { function }, combineFunction, minScore, maxBoost); + } + + /** + * Creates a FunctionScoreQuery with multiple score functions + * @param subQuery The query to match. + * @param scoreMode Defines how the different score functions should be combined. + * @param functions The {@link ScoreFunction}s to apply. + * @param combineFunction Defines how the query and function score should be applied. + * @param minScore The minimum score to consider a document. + * @param maxBoost The maximum applicable boost. + */ + public FunctionScoreQuery( + Query subQuery, + ScoreMode scoreMode, + ScoreFunction[] functions, + CombineFunction combineFunction, + Float minScore, + float maxBoost + ) { + this(subQuery, null, scoreMode, functions, combineFunction, minScore, maxBoost); } /** * Creates a FunctionScoreQuery with multiple score functions * @param subQuery The query to match. + * @param queryName filter query name * @param scoreMode Defines how the different score functions should be combined. * @param functions The {@link ScoreFunction}s to apply. * @param combineFunction Defines how the query and function score should be applied. @@ -187,6 +270,7 @@ public FunctionScoreQuery(Query subQuery, ScoreFunction function, CombineFunctio */ public FunctionScoreQuery( Query subQuery, + @Nullable String queryName, ScoreMode scoreMode, ScoreFunction[] functions, CombineFunction combineFunction, @@ -197,6 +281,7 @@ public FunctionScoreQuery( throw new IllegalArgumentException("Score function should not be null"); } this.subQuery = subQuery; + this.queryName = queryName; this.scoreMode = scoreMode; this.functions = functions; this.maxBoost = maxBoost; @@ -240,7 +325,7 @@ public Query rewrite(IndexReader reader) throws IOException { needsRewrite |= (newFunctions[i] != functions[i]); } if (needsRewrite) { - return new FunctionScoreQuery(newQ, scoreMode, newFunctions, combineFunction, minScore, maxBoost); + return new FunctionScoreQuery(newQ, queryName, scoreMode, newFunctions, combineFunction, minScore, maxBoost); } return this; } @@ -332,8 +417,7 @@ public Scorer scorer(LeafReaderContext context) throws IOException { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - - Explanation expl = subQueryWeight.explain(context, doc); + Explanation expl = Functions.explainWithName(subQueryWeight.explain(context, doc), queryName); if (!expl.isMatch()) { return expl; } @@ -355,11 +439,15 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio Explanation functionExplanation = function.getLeafScoreFunction(context).explainScore(doc, expl); if (function instanceof FilterScoreFunction) { float factor = functionExplanation.getValue().floatValue(); - Query filterQuery = ((FilterScoreFunction) function).filter; + final FilterScoreFunction filterScoreFunction = (FilterScoreFunction) function; + Query filterQuery = filterScoreFunction.filter; Explanation filterExplanation = Explanation.match( factor, "function score, product of:", 
- Explanation.match(1.0f, "match filter: " + filterQuery.toString()), + Explanation.match( + 1.0f, + "match filter" + Functions.nameOrEmptyFunc(filterScoreFunction.queryName) + ": " + filterQuery.toString() + ), functionExplanation ); functionsExplanations.add(filterExplanation); @@ -543,11 +631,12 @@ public boolean equals(Object o) { && Objects.equals(this.combineFunction, other.combineFunction) && Objects.equals(this.minScore, other.minScore) && Objects.equals(this.scoreMode, other.scoreMode) - && Arrays.equals(this.functions, other.functions); + && Arrays.equals(this.functions, other.functions) + && Objects.equals(this.queryName, other.queryName); } @Override public int hashCode() { - return Objects.hash(classHash(), subQuery, maxBoost, combineFunction, minScore, scoreMode, Arrays.hashCode(functions)); + return Objects.hash(classHash(), subQuery, maxBoost, combineFunction, minScore, scoreMode, Arrays.hashCode(functions), queryName); } } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/Functions.java b/server/src/main/java/org/opensearch/common/lucene/search/function/Functions.java new file mode 100644 index 0000000000000..a9de8ead31e2a --- /dev/null +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/Functions.java @@ -0,0 +1,66 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.lucene.search.function; + +import org.apache.lucene.search.Explanation; +import org.opensearch.common.Strings; +import org.opensearch.index.query.AbstractQueryBuilder; +import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder; + +/** + * Helper utility class for functions + */ +public final class Functions { + private Functions() {} + + /** + * Return function name wrapped into brackets or empty string, for example: '(_name: func1)' + * @param functionName function name + * @return function name wrapped into brackets or empty string + */ + public static String nameOrEmptyFunc(final String functionName) { + if (!Strings.isNullOrEmpty(functionName)) { + return "(" + AbstractQueryBuilder.NAME_FIELD.getPreferredName() + ": " + functionName + ")"; + } else { + return ""; + } + } + + /** + * Return function name as an argument or empty string, for example: ', _name: func1' + * @param functionName function name + * @return function name as an argument or empty string + */ + public static String nameOrEmptyArg(final String functionName) { + if (!Strings.isNullOrEmpty(functionName)) { + return ", " + FunctionScoreQueryBuilder.NAME_FIELD.getPreferredName() + ": " + functionName; + } else { + return ""; + } + } + + /** + * Enrich explanation with query name + * @param explanation explanation + * @param queryName query name + * @return explanation enriched with query name + */ + public static Explanation explainWithName(Explanation explanation, String queryName) { + if (Strings.isNullOrEmpty(queryName)) { + return explanation; + } else { + final String description = explanation.getDescription() + " " + nameOrEmptyFunc(queryName); + if (explanation.isMatch()) { + return Explanation.match(explanation.getValue(), description, explanation.getDetails()); + } else { + return Explanation.noMatch(description, explanation.getDetails()); + } + } + } +} diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/RandomScoreFunction.java 
b/server/src/main/java/org/opensearch/common/lucene/search/function/RandomScoreFunction.java index 78df111393394..f4fcda47b0078 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/RandomScoreFunction.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/RandomScoreFunction.java @@ -35,6 +35,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.apache.lucene.util.StringHelper; +import org.opensearch.common.Nullable; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.LeafFieldData; import org.opensearch.index.fielddata.SortedBinaryDocValues; @@ -50,6 +51,7 @@ public class RandomScoreFunction extends ScoreFunction { private final int originalSeed; private final int saltedSeed; private final IndexFieldData fieldData; + private final String functionName; /** * Creates a RandomScoreFunction. @@ -59,10 +61,23 @@ public class RandomScoreFunction extends ScoreFunction { * @param uidFieldData The field data for _uid to use for generating consistent random values for the same id */ public RandomScoreFunction(int seed, int salt, IndexFieldData uidFieldData) { + this(seed, salt, uidFieldData, null); + } + + /** + * Creates a RandomScoreFunction. + * + * @param seed A seed for randomness + * @param salt A value to salt the seed with, ideally unique to the running node/index + * @param uidFieldData The field data for _uid to use for generating consistent random values for the same id + * @param functionName The function name + */ + public RandomScoreFunction(int seed, int salt, IndexFieldData uidFieldData, @Nullable String functionName) { super(CombineFunction.MULTIPLY); this.originalSeed = seed; this.saltedSeed = BitMixer.mix(seed, salt); this.fieldData = uidFieldData; + this.functionName = functionName; } @Override @@ -97,7 +112,7 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE String field = fieldData == null ? 
null : fieldData.getFieldName(); return Explanation.match( (float) score(docId, subQueryScore.getValue().floatValue()), - "random score function (seed: " + originalSeed + ", field: " + field + ")" + "random score function (seed: " + originalSeed + ", field: " + field + Functions.nameOrEmptyArg(functionName) + ")" ); } }; diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreFunction.java index 5ce50844b3dcc..3a7cc970908a5 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreFunction.java @@ -39,6 +39,7 @@ import org.opensearch.script.ScoreScript; import org.opensearch.script.Script; import org.opensearch.Version; +import org.opensearch.common.Nullable; import java.io.IOException; import java.util.Objects; @@ -67,14 +68,23 @@ public float score() { private final int shardId; private final String indexName; private final Version indexVersion; - - public ScriptScoreFunction(Script sScript, ScoreScript.LeafFactory script, String indexName, int shardId, Version indexVersion) { + private final String functionName; + + public ScriptScoreFunction( + Script sScript, + ScoreScript.LeafFactory script, + String indexName, + int shardId, + Version indexVersion, + @Nullable String functionName + ) { super(CombineFunction.REPLACE); this.sScript = sScript; this.script = script; this.indexName = indexName; this.shardId = shardId; this.indexVersion = indexVersion; + this.functionName = functionName; } @Override @@ -105,11 +115,15 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE leafScript.setDocument(docId); scorer.docid = docId; scorer.score = subQueryScore.getValue().floatValue(); - exp = ((ExplainableScoreScript) leafScript).explain(subQueryScore); + exp = ((ExplainableScoreScript) leafScript).explain(subQueryScore, functionName); } else { double score = score(docId, subQueryScore.getValue().floatValue()); // info about params already included in sScript - String explanation = "script score function, computed with script:\"" + sScript + "\""; + String explanation = "script score function" + + Functions.nameOrEmptyFunc(functionName) + + ", computed with script:\"" + + sScript + + "\""; Explanation scoreExp = Explanation.match(subQueryScore.getValue(), "_score: ", subQueryScore); return Explanation.match((float) score, explanation, scoreExp); } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java index 7d9f293b0c17b..44c76e74d5a41 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java @@ -50,6 +50,7 @@ import org.apache.lucene.search.BulkScorer; import org.apache.lucene.util.Bits; import org.opensearch.Version; +import org.opensearch.common.Nullable; import org.opensearch.script.ScoreScript; import org.opensearch.script.ScoreScript.ExplanationHolder; import org.opensearch.script.Script; @@ -69,6 +70,7 @@ public class ScriptScoreQuery extends Query { private final String indexName; private final int shardId; private final Version indexVersion; + private final String queryName; public ScriptScoreQuery( Query subQuery, @@ -78,8 +80,22 @@ public 
ScriptScoreQuery( String indexName, int shardId, Version indexVersion + ) { + this(subQuery, null, script, scriptBuilder, minScore, indexName, shardId, indexVersion); + } + + public ScriptScoreQuery( + Query subQuery, + @Nullable String queryName, + Script script, + ScoreScript.LeafFactory scriptBuilder, + Float minScore, + String indexName, + int shardId, + Version indexVersion ) { this.subQuery = subQuery; + this.queryName = queryName; this.script = script; this.scriptBuilder = scriptBuilder; this.minScore = minScore; @@ -92,7 +108,7 @@ public ScriptScoreQuery( public Query rewrite(IndexReader reader) throws IOException { Query newQ = subQuery.rewrite(reader); if (newQ != subQuery) { - return new ScriptScoreQuery(newQ, script, scriptBuilder, minScore, indexName, shardId, indexVersion); + return new ScriptScoreQuery(newQ, queryName, script, scriptBuilder, minScore, indexName, shardId, indexVersion); } return super.rewrite(reader); } @@ -140,7 +156,7 @@ public Scorer scorer(LeafReaderContext context) throws IOException { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - Explanation subQueryExplanation = subQueryWeight.explain(context, doc); + Explanation subQueryExplanation = Functions.explainWithName(subQueryWeight.explain(context, doc), queryName); if (subQueryExplanation.isMatch() == false) { return subQueryExplanation; } @@ -210,7 +226,8 @@ public void visit(QueryVisitor visitor) { @Override public String toString(String field) { StringBuilder sb = new StringBuilder(); - sb.append("script_score (").append(subQuery.toString(field)).append(", script: "); + sb.append("script_score (").append(subQuery.toString(field)); + sb.append(Functions.nameOrEmptyArg(queryName)).append(", script: "); sb.append("{" + script.toString() + "}"); return sb.toString(); } @@ -225,12 +242,13 @@ public boolean equals(Object o) { && script.equals(that.script) && Objects.equals(minScore, that.minScore) && indexName.equals(that.indexName) - && indexVersion.equals(that.indexVersion); + && indexVersion.equals(that.indexVersion) + && Objects.equals(queryName, that.queryName); } @Override public int hashCode() { - return Objects.hash(subQuery, script, minScore, indexName, shardId, indexVersion); + return Objects.hash(subQuery, script, minScore, indexName, shardId, indexVersion, queryName); } private static class ScriptScorer extends Scorer { diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/WeightFactorFunction.java b/server/src/main/java/org/opensearch/common/lucene/search/function/WeightFactorFunction.java index 9ef33efdfd9f5..71968a0545cff 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/WeightFactorFunction.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/WeightFactorFunction.java @@ -34,6 +34,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; +import org.opensearch.common.Strings; import java.io.IOException; import java.util.Objects; @@ -45,9 +47,17 @@ public class WeightFactorFunction extends ScoreFunction { private float weight = 1.0f; public WeightFactorFunction(float weight, ScoreFunction scoreFunction) { + this(weight, scoreFunction, null); + } + + public WeightFactorFunction(float weight, ScoreFunction scoreFunction, @Nullable String functionName) { super(CombineFunction.MULTIPLY); if (scoreFunction == null) { - this.scoreFunction = SCORE_ONE; + if (Strings.isNullOrEmpty(functionName)) { 
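+                // no function name was supplied either, so the shared SCORE_ONE constant can be reused;
+                // a named function gets its own ScoreOne below so the name shows up in explain output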
+ this.scoreFunction = SCORE_ONE; + } else { + this.scoreFunction = new ScoreOne(CombineFunction.MULTIPLY, functionName); + } } else { this.scoreFunction = scoreFunction; } @@ -55,9 +65,11 @@ public WeightFactorFunction(float weight, ScoreFunction scoreFunction) { } public WeightFactorFunction(float weight) { - super(CombineFunction.MULTIPLY); - this.scoreFunction = SCORE_ONE; - this.weight = weight; + this(weight, null, null); + } + + public WeightFactorFunction(float weight, @Nullable String functionName) { + this(weight, null, functionName); } @Override @@ -112,9 +124,15 @@ protected int doHashCode() { } private static class ScoreOne extends ScoreFunction { + private final String functionName; protected ScoreOne(CombineFunction scoreCombiner) { + this(scoreCombiner, null); + } + + protected ScoreOne(CombineFunction scoreCombiner, @Nullable String functionName) { super(scoreCombiner); + this.functionName = functionName; } @Override @@ -127,7 +145,10 @@ public double score(int docId, float subQueryScore) { @Override public Explanation explainScore(int docId, Explanation subQueryScore) { - return Explanation.match(1.0f, "constant score 1.0 - no function provided"); + return Explanation.match( + 1.0f, + "constant score 1.0" + Functions.nameOrEmptyFunc(functionName) + " - no function provided" + ); } }; } diff --git a/server/src/main/java/org/opensearch/common/time/EpochTime.java b/server/src/main/java/org/opensearch/common/time/EpochTime.java index 5c6e024c7475c..7894a653492c8 100644 --- a/server/src/main/java/org/opensearch/common/time/EpochTime.java +++ b/server/src/main/java/org/opensearch/common/time/EpochTime.java @@ -43,8 +43,10 @@ import java.time.temporal.TemporalField; import java.time.temporal.TemporalUnit; import java.time.temporal.ValueRange; +import java.util.HashMap; import java.util.Locale; import java.util.Map; +import java.util.Optional; /** * This class provides {@link DateTimeFormatter}s capable of parsing epoch seconds and milliseconds. @@ -52,13 +54,14 @@ * The seconds formatter is provided by {@link #SECONDS_FORMATTER}. * The milliseconds formatter is provided by {@link #MILLIS_FORMATTER}. *
<p>
        - * Both formatters support fractional time, up to nanosecond precision. Values must be positive numbers. + * Both formatters support fractional time, up to nanosecond precision. */ class EpochTime { private static final ValueRange LONG_POSITIVE_RANGE = ValueRange.of(0, Long.MAX_VALUE); + private static final ValueRange LONG_RANGE = ValueRange.of(Long.MIN_VALUE, Long.MAX_VALUE); - private static final EpochField SECONDS = new EpochField(ChronoUnit.SECONDS, ChronoUnit.FOREVER, LONG_POSITIVE_RANGE) { + private static final EpochField SECONDS = new EpochField(ChronoUnit.SECONDS, ChronoUnit.FOREVER, LONG_RANGE) { @Override public boolean isSupportedBy(TemporalAccessor temporal) { return temporal.isSupported(ChronoField.INSTANT_SECONDS); @@ -97,15 +100,55 @@ public long getFrom(TemporalAccessor temporal) { } }; - private static final EpochField MILLIS = new EpochField(ChronoUnit.MILLIS, ChronoUnit.FOREVER, LONG_POSITIVE_RANGE) { + private static final long NEGATIVE = 0; + private static final long POSITIVE = 1; + private static final EpochField SIGN = new EpochField(ChronoUnit.FOREVER, ChronoUnit.FOREVER, ValueRange.of(NEGATIVE, POSITIVE)) { @Override public boolean isSupportedBy(TemporalAccessor temporal) { - return temporal.isSupported(ChronoField.INSTANT_SECONDS) && temporal.isSupported(ChronoField.MILLI_OF_SECOND); + return temporal.isSupported(ChronoField.INSTANT_SECONDS); + } + + @Override + public long getFrom(TemporalAccessor temporal) { + return temporal.getLong(ChronoField.INSTANT_SECONDS) < 0 ? NEGATIVE : POSITIVE; + } + }; + + // Millis as absolute values. Negative millis are encoded by having a NEGATIVE SIGN. + private static final EpochField MILLIS_ABS = new EpochField(ChronoUnit.MILLIS, ChronoUnit.FOREVER, LONG_POSITIVE_RANGE) { + @Override + public boolean isSupportedBy(TemporalAccessor temporal) { + return temporal.isSupported(ChronoField.INSTANT_SECONDS) + && (temporal.isSupported(ChronoField.NANO_OF_SECOND) || temporal.isSupported(ChronoField.MILLI_OF_SECOND)); } @Override public long getFrom(TemporalAccessor temporal) { - return temporal.getLong(ChronoField.INSTANT_SECONDS) * 1_000 + temporal.getLong(ChronoField.MILLI_OF_SECOND); + long instantSecondsInMillis = temporal.getLong(ChronoField.INSTANT_SECONDS) * 1_000; + if (instantSecondsInMillis >= 0) { + if (temporal.isSupported(ChronoField.NANO_OF_SECOND)) { + return instantSecondsInMillis + (temporal.getLong(ChronoField.NANO_OF_SECOND) / 1_000_000); + } else { + return instantSecondsInMillis + temporal.getLong(ChronoField.MILLI_OF_SECOND); + } + } else { // negative timestamp + if (temporal.isSupported(ChronoField.NANO_OF_SECOND)) { + long millis = instantSecondsInMillis; + long nanos = temporal.getLong(ChronoField.NANO_OF_SECOND); + if (nanos % 1_000_000 != 0) { + // Fractional negative timestamp. + // Add 1 ms towards positive infinity because the fraction leads + // the output's integral part to be an off-by-one when the + // `(nanos / 1_000_000)` is added below. 
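+                        // e.g. an instant 0.5ms before the epoch has INSTANT_SECONDS = -1 and
+                        // NANO_OF_SECOND = 999_500_000, so instantSecondsInMillis = -1000 and
+                        // (nanos / 1_000_000) = 999; without this correction the absolute millis
+                        // would resolve to 1 (printed as "-1.5") instead of 0 (printed as "-0.5")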
+ millis += 1; + } + millis += (nanos / 1_000_000); + return -millis; + } else { + long millisOfSecond = temporal.getLong(ChronoField.MILLI_OF_SECOND); + return -(instantSecondsInMillis + millisOfSecond); + } + } } @Override @@ -114,12 +157,37 @@ public TemporalAccessor resolve( TemporalAccessor partialTemporal, ResolverStyle resolverStyle ) { - long secondsAndMillis = fieldValues.remove(this); - long seconds = secondsAndMillis / 1_000; - long nanos = secondsAndMillis % 1000 * 1_000_000; + Long sign = Optional.ofNullable(fieldValues.remove(SIGN)).orElse(POSITIVE); + Long nanosOfMilli = fieldValues.remove(NANOS_OF_MILLI); - if (nanosOfMilli != null) { - nanos += nanosOfMilli; + long secondsAndMillis = fieldValues.remove(this); + + long seconds; + long nanos; + if (sign == NEGATIVE) { + secondsAndMillis = -secondsAndMillis; + seconds = secondsAndMillis / 1_000; + nanos = secondsAndMillis % 1000 * 1_000_000; + // `secondsAndMillis < 0` implies negative timestamp; so `nanos < 0` + if (nanosOfMilli != null) { + // aggregate fractional part of the input; subtract b/c `nanos < 0` + nanos -= nanosOfMilli; + } + if (nanos != 0) { + // nanos must be positive. B/c the timestamp is represented by the + // (seconds, nanos) tuple, seconds moves 1s toward negative-infinity + // and nanos moves 1s toward positive-infinity + seconds -= 1; + nanos = 1_000_000_000 + nanos; + } + } else { + seconds = secondsAndMillis / 1_000; + nanos = secondsAndMillis % 1000 * 1_000_000; + + if (nanosOfMilli != null) { + // aggregate fractional part of the input + nanos += nanosOfMilli; + } } fieldValues.put(ChronoField.INSTANT_SECONDS, seconds); fieldValues.put(ChronoField.NANO_OF_SECOND, nanos); @@ -127,6 +195,9 @@ public TemporalAccessor resolve( if (fieldValues.containsKey(ChronoField.MILLI_OF_SECOND)) { fieldValues.put(ChronoField.MILLI_OF_SECOND, nanos / 1_000_000); } + if (fieldValues.containsKey(ChronoField.MICRO_OF_SECOND)) { + fieldValues.put(ChronoField.MICRO_OF_SECOND, nanos / 1000); + } return null; } }; @@ -141,7 +212,11 @@ public boolean isSupportedBy(TemporalAccessor temporal) { @Override public long getFrom(TemporalAccessor temporal) { - return temporal.getLong(ChronoField.NANO_OF_SECOND) % 1_000_000; + if (temporal.getLong(ChronoField.INSTANT_SECONDS) < 0) { + return (1_000_000_000 - temporal.getLong(ChronoField.NANO_OF_SECOND)) % 1_000_000; + } else { + return temporal.getLong(ChronoField.NANO_OF_SECOND) % 1_000_000; + } } }; @@ -157,13 +232,22 @@ public long getFrom(TemporalAccessor temporal) { .appendLiteral('.') .toFormatter(Locale.ROOT); - // this supports milliseconds without any fraction - private static final DateTimeFormatter MILLISECONDS_FORMATTER1 = new DateTimeFormatterBuilder().appendValue( - MILLIS, - 1, - 19, - SignStyle.NORMAL - ).optionalStart().appendFraction(NANOS_OF_MILLI, 0, 6, true).optionalEnd().toFormatter(Locale.ROOT); + private static final Map SIGN_FORMATTER_LOOKUP = new HashMap() { + { + put(POSITIVE, ""); + put(NEGATIVE, "-"); + } + }; + + // this supports milliseconds + private static final DateTimeFormatter MILLISECONDS_FORMATTER1 = new DateTimeFormatterBuilder().optionalStart() + .appendText(SIGN, SIGN_FORMATTER_LOOKUP) // field is only created in the presence of a '-' char. 
+ .optionalEnd() + .appendValue(MILLIS_ABS, 1, 19, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendFraction(NANOS_OF_MILLI, 0, 6, true) + .optionalEnd() + .toFormatter(Locale.ROOT); // this supports milliseconds ending in dot private static final DateTimeFormatter MILLISECONDS_FORMATTER2 = new DateTimeFormatterBuilder().append(MILLISECONDS_FORMATTER1) diff --git a/server/src/main/java/org/opensearch/index/codec/CodecServiceConfig.java b/server/src/main/java/org/opensearch/index/codec/CodecServiceConfig.java new file mode 100644 index 0000000000000..313c0d359bb02 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/CodecServiceConfig.java @@ -0,0 +1,45 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec; + +import org.apache.logging.log4j.Logger; +import org.opensearch.common.Nullable; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.mapper.MapperService; + +import java.util.Objects; + +/** + * The configuration parameters necessary for the {@link CodecService} instance construction. + */ +public final class CodecServiceConfig { + private final IndexSettings indexSettings; + private final MapperService mapperService; + private final Logger logger; + + public CodecServiceConfig(IndexSettings indexSettings, @Nullable MapperService mapperService, @Nullable Logger logger) { + this.indexSettings = Objects.requireNonNull(indexSettings); + this.mapperService = mapperService; + this.logger = logger; + } + + public IndexSettings getIndexSettings() { + return indexSettings; + } + + @Nullable + public MapperService getMapperService() { + return mapperService; + } + + @Nullable + public Logger getLogger() { + return logger; + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/CodecServiceFactory.java b/server/src/main/java/org/opensearch/index/codec/CodecServiceFactory.java new file mode 100644 index 0000000000000..da28c5f06b035 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/CodecServiceFactory.java @@ -0,0 +1,22 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.codec; + +/** + * A factory for creating new {@link CodecService} instance + */ +@FunctionalInterface +public interface CodecServiceFactory { + /** + * Create new {@link CodecService} instance + * @param config code service configuration + * @return new {@link CodecService} instance + */ + CodecService createCodecService(CodecServiceConfig config); +} diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index a768a5275e586..a9054e5ca4122 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -59,6 +59,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.collect.ImmutableOpenMap; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; import org.opensearch.common.logging.Loggers; @@ -1116,7 +1117,7 @@ public abstract void forceMerge( * * @param flushFirst indicates whether the engine should flush before returning the snapshot */ - public abstract IndexCommitRef acquireLastIndexCommit(boolean flushFirst) throws EngineException; + public abstract GatedCloseable acquireLastIndexCommit(boolean flushFirst) throws EngineException; /** * Fetch a snapshot of the latest SegmentInfos from the engine and ensure that segment files are retained in the directory @@ -1139,7 +1140,7 @@ public SegmentInfos getLatestSegmentInfos() { /** * Snapshots the most recent safe index commit from the engine. */ - public abstract IndexCommitRef acquireSafeIndexCommit() throws EngineException; + public abstract GatedCloseable acquireSafeIndexCommit() throws EngineException; /** * @return a summary of the contents of the current safe commit @@ -1854,28 +1855,6 @@ private void awaitPendingClose() { } } - public static class IndexCommitRef implements Closeable { - private final AtomicBoolean closed = new AtomicBoolean(); - private final CheckedRunnable onClose; - private final IndexCommit indexCommit; - - public IndexCommitRef(IndexCommit indexCommit, CheckedRunnable onClose) { - this.indexCommit = indexCommit; - this.onClose = onClose; - } - - @Override - public void close() throws IOException { - if (closed.compareAndSet(false, true)) { - onClose.run(); - } - } - - public IndexCommit getIndexCommit() { - return indexCommit; - } - } - public static class SegmentInfosRef implements Closeable { private final AtomicBoolean closed = new AtomicBoolean(); private final CheckedRunnable onClose; diff --git a/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java b/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java index 527252f20cb97..9447a623c3ffe 100644 --- a/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java +++ b/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java @@ -8,6 +8,7 @@ package org.opensearch.index.engine; +import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.MergePolicy; import org.apache.lucene.search.QueryCache; @@ -15,9 +16,13 @@ import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.Sort; import org.apache.lucene.search.similarities.Similarity; +import org.opensearch.common.Nullable; import org.opensearch.common.unit.TimeValue; import 
org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; +import org.opensearch.index.codec.CodecServiceConfig; +import org.opensearch.index.codec.CodecServiceFactory; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.index.shard.ShardId; import org.opensearch.index.store.Store; @@ -39,7 +44,7 @@ * A factory to create an EngineConfig based on custom plugin overrides */ public class EngineConfigFactory { - private final CodecService codecService; + private final CodecServiceFactory codecServiceFactory; private final TranslogDeletionPolicyFactory translogDeletionPolicyFactory; /** default ctor primarily used for tests without plugins */ @@ -58,6 +63,8 @@ public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti EngineConfigFactory(Collection enginePlugins, IndexSettings idxSettings) { Optional codecService = Optional.empty(); String codecServiceOverridingPlugin = null; + Optional codecServiceFactory = Optional.empty(); + String codecServiceFactoryOverridingPlugin = null; Optional translogDeletionPolicyFactory = Optional.empty(); String translogDeletionPolicyOverridingPlugin = null; for (EnginePlugin enginePlugin : enginePlugins) { @@ -65,7 +72,7 @@ public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti if (codecService.isPresent() == false) { codecService = enginePlugin.getCustomCodecService(idxSettings); codecServiceOverridingPlugin = enginePlugin.getClass().getName(); - } else { + } else if (enginePlugin.getCustomCodecService(idxSettings).isPresent()) { throw new IllegalStateException( "existing codec service already overridden in: " + codecServiceOverridingPlugin @@ -76,7 +83,7 @@ public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti if (translogDeletionPolicyFactory.isPresent() == false) { translogDeletionPolicyFactory = enginePlugin.getCustomTranslogDeletionPolicyFactory(); translogDeletionPolicyOverridingPlugin = enginePlugin.getClass().getName(); - } else { + } else if (enginePlugin.getCustomTranslogDeletionPolicyFactory().isPresent()) { throw new IllegalStateException( "existing TranslogDeletionPolicyFactory is already overridden in: " + translogDeletionPolicyOverridingPlugin @@ -84,12 +91,37 @@ public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti + enginePlugin.getClass().getName() ); } + // get overriding CodecServiceFactory from EnginePlugin + if (codecServiceFactory.isPresent() == false) { + codecServiceFactory = enginePlugin.getCustomCodecServiceFactory(idxSettings); + codecServiceFactoryOverridingPlugin = enginePlugin.getClass().getName(); + } else if (enginePlugin.getCustomCodecServiceFactory(idxSettings).isPresent()) { + throw new IllegalStateException( + "existing codec service factory already overridden in: " + + codecServiceFactoryOverridingPlugin + + " attempting to override again by: " + + enginePlugin.getClass().getName() + ); + } + } + + if (codecService.isPresent() && codecServiceFactory.isPresent()) { + throw new IllegalStateException( + "both codec service and codec service factory are present, codec service provided by: " + + codecServiceOverridingPlugin + + " conflicts with codec service factory provided by: " + + codecServiceFactoryOverridingPlugin + ); } - this.codecService = codecService.orElse(null); + + final CodecService instance = codecService.orElse(null); + this.codecServiceFactory = (instance != null) ? 
(config) -> instance : codecServiceFactory.orElse(null); this.translogDeletionPolicyFactory = translogDeletionPolicyFactory.orElse((idxs, rtls) -> null); } - /** Instantiates a new EngineConfig from the provided custom overrides */ + /** + * Instantiates a new EngineConfig from the provided custom overrides + */ public EngineConfig newEngineConfig( ShardId shardId, ThreadPool threadPool, @@ -115,6 +147,10 @@ public EngineConfig newEngineConfig( EngineConfig.TombstoneDocSupplier tombstoneDocSupplier, Boolean isReadOnly ) { + CodecService codecServiceToUse = codecService; + if (codecService == null && this.codecServiceFactory != null) { + codecServiceToUse = newCodecServiceOrDefault(indexSettings, null, null, null); + } return new EngineConfig( shardId, @@ -125,7 +161,7 @@ public EngineConfig newEngineConfig( mergePolicy, analyzer, similarity, - this.codecService != null ? this.codecService : codecService, + codecServiceToUse, eventListener, queryCache, queryCachingPolicy, @@ -143,4 +179,15 @@ public EngineConfig newEngineConfig( tombstoneDocSupplier ); } + + public CodecService newCodecServiceOrDefault( + IndexSettings indexSettings, + @Nullable MapperService mapperService, + Logger logger, + CodecService defaultCodecService + ) { + return this.codecServiceFactory != null + ? this.codecServiceFactory.createCodecService(new CodecServiceConfig(indexSettings, mapperService, logger)) + : defaultCodecService; + } } diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index aa1be50499b24..f6ea1c73271dc 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -77,6 +77,7 @@ import org.opensearch.common.Booleans; import org.opensearch.common.Nullable; import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lucene.LoggerInfoStream; import org.opensearch.common.lucene.Lucene; @@ -108,11 +109,10 @@ import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.OpenSearchMergePolicy; import org.opensearch.index.shard.ShardId; -import org.opensearch.index.store.Store; +import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.Translog; import org.opensearch.index.translog.TranslogConfig; import org.opensearch.index.translog.TranslogCorruptedException; -import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.TranslogDeletionPolicy; import org.opensearch.index.translog.TranslogStats; import org.opensearch.search.suggest.completion.CompletionStats; @@ -121,7 +121,6 @@ import java.io.Closeable; import java.io.IOException; import java.nio.ByteBuffer; -import java.nio.file.Path; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -258,7 +257,7 @@ public InternalEngine(EngineConfig engineConfig) { mergeScheduler = scheduler = new EngineMergeScheduler(engineConfig.getShardId(), engineConfig.getIndexSettings()); throttle = new IndexThrottle(); try { - trimUnsafeCommits(engineConfig); + store.trimUnsafeCommits(engineConfig.getTranslogConfig().getTranslogPath()); translog = openTranslog(engineConfig, translogDeletionPolicy, engineConfig.getGlobalCheckpointSupplier(), seqNo -> { final LocalCheckpointTracker tracker = getLocalCheckpointTracker(); assert 
tracker != null || getTranslog().isOpen() == false; @@ -2257,7 +2256,7 @@ public void forceMerge( } @Override - public IndexCommitRef acquireLastIndexCommit(final boolean flushFirst) throws EngineException { + public GatedCloseable acquireLastIndexCommit(final boolean flushFirst) throws EngineException { // we have to flush outside of the readlock otherwise we might have a problem upgrading // the to a write lock when we fail the engine in this operation if (flushFirst) { @@ -2266,7 +2265,7 @@ public IndexCommitRef acquireLastIndexCommit(final boolean flushFirst) throws En logger.trace("finish flush for snapshot"); } final IndexCommit lastCommit = combinedDeletionPolicy.acquireIndexCommit(false); - return new Engine.IndexCommitRef(lastCommit, () -> releaseIndexCommit(lastCommit)); + return new GatedCloseable<>(lastCommit, () -> releaseIndexCommit(lastCommit)); } @Override @@ -2299,9 +2298,9 @@ public SegmentInfos getLatestSegmentInfos() { } @Override - public IndexCommitRef acquireSafeIndexCommit() throws EngineException { + public GatedCloseable acquireSafeIndexCommit() throws EngineException { final IndexCommit safeCommit = combinedDeletionPolicy.acquireIndexCommit(true); - return new Engine.IndexCommitRef(safeCommit, () -> releaseIndexCommit(safeCommit)); + return new GatedCloseable<>(safeCommit, () -> releaseIndexCommit(safeCommit)); } private void releaseIndexCommit(IndexCommit snapshot) throws IOException { @@ -3052,15 +3051,6 @@ private boolean assertMaxSeqNoOfUpdatesIsAdvanced(Term id, long seqNo, boolean a return true; } - private static void trimUnsafeCommits(EngineConfig engineConfig) throws IOException { - final Store store = engineConfig.getStore(); - final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY); - final Path translogPath = engineConfig.getTranslogConfig().getTranslogPath(); - final long globalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID); - final long minRetainedTranslogGen = Translog.readMinTranslogGeneration(translogPath, translogUUID); - store.trimUnsafeCommits(globalCheckpoint, minRetainedTranslogGen, engineConfig.getIndexSettings().getIndexVersionCreated()); - } - /** * Restores the live version map and local checkpoint of this engine using documents (including soft-deleted) * after the local checkpoint in the safe commit. 
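The hunks above swap `Engine.IndexCommitRef` for `GatedCloseable<IndexCommit>`, pairing an acquired index commit with the action that releases it. A minimal, self-contained sketch of that wrapper shape (an illustrative stand-in, not the real `org.opensearch.common.concurrent.GatedCloseable`, whose details may differ):

```
import java.io.Closeable;
import java.io.IOException;

// Illustrative stand-in for the gated-closeable pattern adopted above: a wrapped value plus
// the release action, exposed through get()/close() so callers can use try-with-resources.
final class GatedCloseableSketch<T> implements Closeable {
    @FunctionalInterface
    interface CheckedRunnable {
        void run() throws IOException;
    }

    private final T value;
    private final CheckedRunnable onClose;

    GatedCloseableSketch(T value, CheckedRunnable onClose) {
        this.value = value;
        this.onClose = onClose;
    }

    T get() {
        return value;
    }

    @Override
    public void close() throws IOException {
        onClose.run(); // release the wrapped resource exactly where it was acquired
    }
}
```

Call sites then read the commit via `get()` and release it with `close()` (or `IOUtils.close(...)`), as the `IndexShard.snapshotStoreMetadata` hunk later in this diff shows.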
This step ensures the live version map and checkpoint tracker diff --git a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java index e936ae10b04ea..eb8536beff152 100644 --- a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java @@ -41,6 +41,7 @@ import org.apache.lucene.store.Lock; import org.opensearch.LegacyESVersion; import org.opensearch.Version; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.common.util.concurrent.ReleasableLock; @@ -49,9 +50,9 @@ import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.store.Store; +import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.Translog; import org.opensearch.index.translog.TranslogConfig; -import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.TranslogDeletionPolicy; import org.opensearch.index.translog.TranslogStats; import org.opensearch.search.suggest.completion.CompletionStats; @@ -419,13 +420,13 @@ public void forceMerge( ) {} @Override - public IndexCommitRef acquireLastIndexCommit(boolean flushFirst) { + public GatedCloseable acquireLastIndexCommit(boolean flushFirst) { store.incRef(); - return new IndexCommitRef(indexCommit, store::decRef); + return new GatedCloseable<>(indexCommit, store::decRef); } @Override - public IndexCommitRef acquireSafeIndexCommit() { + public GatedCloseable acquireSafeIndexCommit() { return acquireLastIndexCommit(false); } diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java index d85c18e272793..c6724d1b3063e 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java @@ -153,7 +153,7 @@ public DocumentMapper(MapperService mapperService, Mapping mapping) { this.fieldMappers = MappingLookup.fromMapping(this.mapping, indexAnalyzers.getDefaultIndexAnalyzer()); try { - mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS); + mappingSource = new CompressedXContent(this, ToXContent.EMPTY_PARAMS); } catch (Exception e) { throw new OpenSearchGenerationException("failed to serialize source for type [" + type + "]", e); } diff --git a/server/src/main/java/org/opensearch/index/query/AbstractGeometryQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/AbstractGeometryQueryBuilder.java index b5b4abdbaf118..9281f1767d72d 100644 --- a/server/src/main/java/org/opensearch/index/query/AbstractGeometryQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/AbstractGeometryQueryBuilder.java @@ -35,11 +35,11 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.SetOnce; +import org.opensearch.Version; import org.opensearch.action.ActionListener; import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; import org.opensearch.client.Client; -import org.opensearch.common.Nullable; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import 
org.opensearch.common.geo.GeoJson; @@ -56,6 +56,7 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.geometry.Geometry; import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; import java.util.Objects; @@ -66,9 +67,6 @@ */ public abstract class AbstractGeometryQueryBuilder> extends AbstractQueryBuilder { - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [geo_shape] queries. " - + "The type should no longer be specified in the [indexed_shape] section."; - public static final String DEFAULT_SHAPE_INDEX_NAME = "shapes"; public static final String DEFAULT_SHAPE_FIELD_NAME = "shape"; public static final ShapeRelation DEFAULT_SHAPE_RELATION = ShapeRelation.INTERSECTS; @@ -80,7 +78,6 @@ public abstract class AbstractGeometryQueryBuilder supplier; protected final String indexedShapeId; - protected final String indexedShapeType; protected Geometry shape; protected String indexedShapeIndex = DEFAULT_SHAPE_INDEX_NAME; @@ -113,7 +109,7 @@ public abstract class AbstractGeometryQueryBuilder supplier, - String indexedShapeId, - @Nullable String indexedShapeType - ) { + protected AbstractGeometryQueryBuilder(String fieldName, Supplier supplier, String indexedShapeId) { + if (fieldName == null) { + throw new IllegalArgumentException("fieldName is required"); + } + if (supplier == null && indexedShapeId == null) { + throw new IllegalArgumentException("either shape or indexedShapeId is required"); + } + this.fieldName = fieldName; this.shape = null; this.supplier = supplier; this.indexedShapeId = indexedShapeId; - this.indexedShapeType = indexedShapeType; } /** @@ -196,11 +174,13 @@ protected AbstractGeometryQueryBuilder(StreamInput in) throws IOException { if (in.readBoolean()) { shape = GeometryIO.readGeometry(in); indexedShapeId = null; - indexedShapeType = null; } else { shape = null; indexedShapeId = in.readOptionalString(); - indexedShapeType = in.readOptionalString(); + if (in.getVersion().before(Version.V_2_0_0)) { + String type = in.readOptionalString(); + assert MapperService.SINGLE_MAPPING_NAME.equals(type) : "Expected type [_doc], got [" + type + "]"; + } indexedShapeIndex = in.readOptionalString(); indexedShapePath = in.readOptionalString(); indexedShapeRouting = in.readOptionalString(); @@ -222,7 +202,9 @@ protected void doWriteTo(StreamOutput out) throws IOException { GeometryIO.writeGeometry(out, shape); } else { out.writeOptionalString(indexedShapeId); - out.writeOptionalString(indexedShapeType); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeOptionalString(MapperService.SINGLE_MAPPING_NAME); + } out.writeOptionalString(indexedShapeIndex); out.writeOptionalString(indexedShapePath); out.writeOptionalString(indexedShapeRouting); @@ -266,17 +248,6 @@ public String indexedShapeId() { return indexedShapeId; } - /** - * @return the document type of the indexed Shape that will be used in the - * Query - * - * @deprecated Types are in the process of being removed. 
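The stream-serialization change above preserves wire compatibility with pre-2.0 nodes: the removed `indexed_shape` type slot is still read and written (always as `MapperService.SINGLE_MAPPING_NAME`) when the remote node is older than `Version.V_2_0_0`. A rough, self-contained sketch of that gate, using hypothetical stand-ins for the stream and version types rather than the real `StreamInput`/`StreamOutput`:

```
// Hypothetical sketch of the version-gated read/write of the legacy type slot; the numeric
// version constant and the In/Out interfaces are stand-ins, not OpenSearch classes.
final class TypeFieldBwcSketch {
    interface Out { void writeOptionalString(String s); int version(); }
    interface In { String readOptionalString(); int version(); }

    static final int V_2_0_0 = 2_000_099;          // placeholder wire version id
    static final String SINGLE_MAPPING_NAME = "_doc";

    static void writeTo(Out out, String indexedShapeId) {
        out.writeOptionalString(indexedShapeId);
        if (out.version() < V_2_0_0) {
            // Older nodes still expect the type slot, so emit the fixed single mapping name.
            out.writeOptionalString(SINGLE_MAPPING_NAME);
        }
    }

    static String readFrom(In in) {
        String indexedShapeId = in.readOptionalString();
        if (in.version() < V_2_0_0) {
            in.readOptionalString(); // discard the legacy type, always "_doc" on the wire
        }
        return indexedShapeId;
    }
}
```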
- */ - @Deprecated - public String indexedShapeType() { - return indexedShapeType; - } - /** * Sets the name of the index where the indexed Shape can be found * @@ -382,12 +353,11 @@ public boolean ignoreUnmapped() { /** creates a new ShapeQueryBuilder from the provided field name and shape builder */ protected abstract AbstractGeometryQueryBuilder newShapeQueryBuilder(String fieldName, Geometry shape); - /** creates a new ShapeQueryBuilder from the provided field name, supplier, indexed shape id, and indexed shape type */ + /** creates a new ShapeQueryBuilder from the provided field name, supplier, indexed shape id */ protected abstract AbstractGeometryQueryBuilder newShapeQueryBuilder( String fieldName, Supplier shapeSupplier, - String indexedShapeId, - String indexedShapeType + String indexedShapeId ); @Override @@ -480,9 +450,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep GeoJson.toXContent(shape, builder, params); } else { builder.startObject(INDEXED_SHAPE_FIELD.getPreferredName()).field(SHAPE_ID_FIELD.getPreferredName(), indexedShapeId); - if (indexedShapeType != null) { - builder.field(SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType); - } if (indexedShapeIndex != null) { builder.field(SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex); } @@ -514,7 +481,6 @@ protected boolean doEquals(AbstractGeometryQueryBuilder other) { && Objects.equals(indexedShapeId, other.indexedShapeId) && Objects.equals(indexedShapeIndex, other.indexedShapeIndex) && Objects.equals(indexedShapePath, other.indexedShapePath) - && Objects.equals(indexedShapeType, other.indexedShapeType) && Objects.equals(indexedShapeRouting, other.indexedShapeRouting) && Objects.equals(relation, other.relation) && Objects.equals(shape, other.shape) @@ -529,7 +495,6 @@ protected int doHashCode() { indexedShapeId, indexedShapeIndex, indexedShapePath, - indexedShapeType, indexedShapeRouting, relation, shape, @@ -552,7 +517,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws listener.onResponse(null); }, listener::onFailure)); }); - return newShapeQueryBuilder(this.fieldName, supplier::get, this.indexedShapeId, this.indexedShapeType).relation(relation); + return newShapeQueryBuilder(this.fieldName, supplier::get, this.indexedShapeId).relation(relation); } return this; } @@ -564,7 +529,6 @@ protected abstract static class ParsedGeometryQueryParams { public ShapeBuilder shape; public String id = null; - public String type = null; public String index = null; public String shapePath = null; public String shapeRouting = null; @@ -608,8 +572,6 @@ public static ParsedGeometryQueryParams parsedParamsFromXContent(XContentParser } else if (token.isValue()) { if (SHAPE_ID_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { params.id = parser.text(); - } else if (SHAPE_TYPE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - params.type = parser.text(); } else if (SHAPE_INDEX_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { params.index = parser.text(); } else if (SHAPE_PATH_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { diff --git a/server/src/main/java/org/opensearch/index/query/GeoShapeQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/GeoShapeQueryBuilder.java index 246a1e1dcf921..161c6e64c7bf3 100644 --- a/server/src/main/java/org/opensearch/index/query/GeoShapeQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/GeoShapeQueryBuilder.java @@ -34,7 +34,6 @@ 
import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; -import org.opensearch.common.Nullable; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.geo.ShapeRelation; @@ -43,7 +42,6 @@ import org.opensearch.common.geo.parsers.ShapeParser; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.geometry.Geometry; @@ -62,8 +60,6 @@ */ public class GeoShapeQueryBuilder extends AbstractGeometryQueryBuilder { public static final String NAME = "geo_shape"; - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(GeoShapeQueryBuilder.class); - protected static final ParseField STRATEGY_FIELD = new ParseField("strategy"); private SpatialStrategy strategy; @@ -97,31 +93,8 @@ public GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape) { super(fieldName, shape); } - public GeoShapeQueryBuilder( - String fieldName, - Supplier shapeSupplier, - String indexedShapeId, - @Nullable String indexedShapeType - ) { - super(fieldName, shapeSupplier, indexedShapeId, indexedShapeType); - } - - /** - * Creates a new GeoShapeQueryBuilder whose Query will be against the given - * field name and will use the Shape found with the given ID in the given - * type - * - * @param fieldName - * Name of the field that will be filtered - * @param indexedShapeId - * ID of the indexed Shape that will be used in the Query - * @param indexedShapeType - * Index type of the indexed Shapes - * @deprecated use {@link #GeoShapeQueryBuilder(String, String)} instead - */ - @Deprecated - public GeoShapeQueryBuilder(String fieldName, String indexedShapeId, String indexedShapeType) { - super(fieldName, indexedShapeId, indexedShapeType); + public GeoShapeQueryBuilder(String fieldName, Supplier shapeSupplier, String indexedShapeId) { + super(fieldName, shapeSupplier, indexedShapeId); } /** @@ -223,13 +196,8 @@ protected GeoShapeQueryBuilder newShapeQueryBuilder(String fieldName, Geometry s } @Override - protected GeoShapeQueryBuilder newShapeQueryBuilder( - String fieldName, - Supplier shapeSupplier, - String indexedShapeId, - String indexedShapeType - ) { - return new GeoShapeQueryBuilder(fieldName, shapeSupplier, indexedShapeId, indexedShapeType); + protected GeoShapeQueryBuilder newShapeQueryBuilder(String fieldName, Supplier shapeSupplier, String indexedShapeId) { + return new GeoShapeQueryBuilder(fieldName, shapeSupplier, indexedShapeId); } @Override @@ -291,14 +259,11 @@ public static GeoShapeQueryBuilder fromXContent(XContentParser parser) throws IO ); GeoShapeQueryBuilder builder; - if (pgsqp.type != null) { - deprecationLogger.deprecate("geo_share_query_with_types", TYPES_DEPRECATION_MESSAGE); - } if (pgsqp.shape != null) { builder = new GeoShapeQueryBuilder(pgsqp.fieldName, pgsqp.shape); } else { - builder = new GeoShapeQueryBuilder(pgsqp.fieldName, pgsqp.id, pgsqp.type); + builder = new GeoShapeQueryBuilder(pgsqp.fieldName, pgsqp.id); } if (pgsqp.index != null) { diff --git a/server/src/main/java/org/opensearch/index/query/QueryBuilders.java b/server/src/main/java/org/opensearch/index/query/QueryBuilders.java index 5b386564df1e8..7ea12fdc6406b 100644 --- a/server/src/main/java/org/opensearch/index/query/QueryBuilders.java +++ 
b/server/src/main/java/org/opensearch/index/query/QueryBuilders.java @@ -33,6 +33,7 @@ package org.opensearch.index.query; import org.apache.lucene.search.join.ScoreMode; +import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.geo.ShapeRelation; @@ -452,7 +453,17 @@ public static FunctionScoreQueryBuilder functionScoreQuery(FunctionScoreQueryBui * @param function The function builder used to custom score */ public static FunctionScoreQueryBuilder functionScoreQuery(ScoreFunctionBuilder function) { - return new FunctionScoreQueryBuilder(function); + return functionScoreQuery(function, null); + } + + /** + * A query that allows to define a custom scoring function. + * + * @param function The function builder used to custom score + * @param queryName The query name + */ + public static FunctionScoreQueryBuilder functionScoreQuery(ScoreFunctionBuilder function, @Nullable String queryName) { + return new FunctionScoreQueryBuilder(function, queryName); } /** @@ -675,14 +686,6 @@ public static GeoShapeQueryBuilder geoShapeQuery(String name, String indexedShap return new GeoShapeQueryBuilder(name, indexedShapeId); } - /** - * @deprecated Types are in the process of being removed, use {@link #geoShapeQuery(String, String)} instead. - */ - @Deprecated - public static GeoShapeQueryBuilder geoShapeQuery(String name, String indexedShapeId, String indexedShapeType) { - return new GeoShapeQueryBuilder(name, indexedShapeId, indexedShapeType); - } - /** * A filter to filter indexed shapes intersecting with shapes * @@ -711,16 +714,6 @@ public static GeoShapeQueryBuilder geoIntersectionQuery(String name, String inde return builder; } - /** - * @deprecated Types are in the process of being removed, use {@link #geoIntersectionQuery(String, String)} instead. - */ - @Deprecated - public static GeoShapeQueryBuilder geoIntersectionQuery(String name, String indexedShapeId, String indexedShapeType) { - GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); - builder.relation(ShapeRelation.INTERSECTS); - return builder; - } - /** * A filter to filter indexed shapes that are contained by a shape * @@ -749,16 +742,6 @@ public static GeoShapeQueryBuilder geoWithinQuery(String name, String indexedSha return builder; } - /** - * @deprecated Types are in the process of being removed, use {@link #geoWithinQuery(String, String)} instead. - */ - @Deprecated - public static GeoShapeQueryBuilder geoWithinQuery(String name, String indexedShapeId, String indexedShapeType) { - GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); - builder.relation(ShapeRelation.WITHIN); - return builder; - } - /** * A filter to filter indexed shapes that are not intersection with the query shape * @@ -787,16 +770,6 @@ public static GeoShapeQueryBuilder geoDisjointQuery(String name, String indexedS return builder; } - /** - * @deprecated Types are in the process of being removed, use {@link #geoDisjointQuery(String, String)} instead. - */ - @Deprecated - public static GeoShapeQueryBuilder geoDisjointQuery(String name, String indexedShapeId, String indexedShapeType) { - GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); - builder.relation(ShapeRelation.DISJOINT); - return builder; - } - /** * A filter to filter only documents where a field exists in them. 
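Given the removal of the typed `geoShapeQuery`/`geoIntersectionQuery`/`geoWithinQuery`/`geoDisjointQuery` overloads and the new `functionScoreQuery(function, queryName)` helper above, caller code looks roughly like this (field names, shape id, and `_name` values are illustrative; the usual `QueryBuilders`/`ScoreFunctionBuilders` imports are assumed):

```
// The second argument names the implicitly wrapped match_all query
// (see the FunctionScoreQueryBuilder hunk later in this diff).
QueryBuilder scored = QueryBuilders.functionScoreQuery(
    ScoreFunctionBuilders.randomFunction("my_random_function"),
    "my_function_score"
);

// Indexed-shape geo query without the removed type argument.
GeoShapeQueryBuilder intersects = QueryBuilders.geoIntersectionQuery("location", "deu")
    .indexedShapeIndex("shapes");
```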
* diff --git a/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java index 881323b05e536..8739e48eb411b 100644 --- a/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java @@ -43,9 +43,11 @@ import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.opensearch.OpenSearchException; +import org.opensearch.common.Nullable; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.lucene.search.function.Functions; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.script.FilterScript; @@ -153,17 +155,19 @@ protected Query doToQuery(QueryShardContext context) throws IOException { } FilterScript.Factory factory = context.compile(script, FilterScript.CONTEXT); FilterScript.LeafFactory filterScript = factory.newFactory(script.getParams(), context.lookup()); - return new ScriptQuery(script, filterScript); + return new ScriptQuery(script, filterScript, queryName); } static class ScriptQuery extends Query { final Script script; final FilterScript.LeafFactory filterScript; + final String queryName; - ScriptQuery(Script script, FilterScript.LeafFactory filterScript) { + ScriptQuery(Script script, FilterScript.LeafFactory filterScript, @Nullable String queryName) { this.script = script; this.filterScript = filterScript; + this.queryName = queryName; } @Override @@ -171,6 +175,7 @@ public String toString(String field) { StringBuilder buffer = new StringBuilder(); buffer.append("ScriptQuery("); buffer.append(script); + buffer.append(Functions.nameOrEmptyArg(queryName)); buffer.append(")"); return buffer.toString(); } diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunction.java b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunction.java index 8a595dda07979..02d01ef470b61 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunction.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunction.java @@ -33,6 +33,7 @@ package org.opensearch.index.query.functionscore; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; /** * Implement this interface to provide a decay function that is executed on a @@ -45,7 +46,7 @@ public interface DecayFunction { double evaluate(double value, double scale); - Explanation explainFunction(String valueString, double value, double scale); + Explanation explainFunction(String valueString, double value, double scale, @Nullable String functionName); /** * The final scale parameter is computed from the scale parameter given by diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java index 3ddacb1305536..0ee61b34cd279 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java @@ -35,6 +35,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.opensearch.OpenSearchParseException; +import 
org.opensearch.common.Nullable; import org.opensearch.common.ParsingException; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.geo.GeoDistance; @@ -93,10 +94,31 @@ protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Ob this(fieldName, origin, scale, offset, DEFAULT_DECAY); } + /** + * Convenience constructor that converts its parameters into json to parse on the data nodes. + */ + protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, @Nullable String functionName) { + this(fieldName, origin, scale, offset, DEFAULT_DECAY, functionName); + } + /** * Convenience constructor that converts its parameters into json to parse on the data nodes. */ protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, double decay) { + this(fieldName, origin, scale, offset, decay, null); + } + + /** + * Convenience constructor that converts its parameters into json to parse on the data nodes. + */ + protected DecayFunctionBuilder( + String fieldName, + Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { if (fieldName == null) { throw new IllegalArgumentException("decay function: field name must not be null"); } @@ -123,6 +145,7 @@ protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Ob } catch (IOException e) { throw new IllegalArgumentException("unable to build inner function object", e); } + setFunctionName(functionName); } protected DecayFunctionBuilder(String fieldName, BytesReference functionBytes) { @@ -285,7 +308,16 @@ private AbstractDistanceScoreFunction parseNumberVariable( ); } IndexNumericFieldData numericFieldData = context.getForField(fieldType); - return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); + return new NumericFieldDataScoreFunction( + origin, + scale, + decay, + offset, + getDecayFunction(), + numericFieldData, + mode, + getFunctionName() + ); } private AbstractDistanceScoreFunction parseGeoVariable( @@ -325,7 +357,7 @@ private AbstractDistanceScoreFunction parseGeoVariable( double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT); double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT); IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); - return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode); + return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode, getFunctionName()); } @@ -375,7 +407,16 @@ private AbstractDistanceScoreFunction parseDateVariable( val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".offset"); double offset = val.getMillis(); IndexNumericFieldData numericFieldData = context.getForField(dateFieldType); - return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); + return new NumericFieldDataScoreFunction( + origin, + scale, + decay, + offset, + getDecayFunction(), + numericFieldData, + mode, + getFunctionName() + ); } static class GeoFieldDataScoreFunction extends AbstractDistanceScoreFunction { @@ -392,9 +433,10 @@ static class GeoFieldDataScoreFunction extends AbstractDistanceScoreFunction { double offset, DecayFunction func, IndexGeoPointFieldData fieldData, - MultiValueMode mode + MultiValueMode mode, + 
@Nullable String functionName ) { - super(scale, decay, offset, func, mode); + super(scale, decay, offset, func, mode, functionName); this.origin = origin; this.fieldData = fieldData; } @@ -485,9 +527,10 @@ static class NumericFieldDataScoreFunction extends AbstractDistanceScoreFunction double offset, DecayFunction func, IndexNumericFieldData fieldData, - MultiValueMode mode + MultiValueMode mode, + @Nullable String functionName ) { - super(scale, decay, offset, func, mode); + super(scale, decay, offset, func, mode, functionName); this.fieldData = fieldData; this.origin = origin; } @@ -569,13 +612,15 @@ public abstract static class AbstractDistanceScoreFunction extends ScoreFunction protected final double offset; private final DecayFunction func; protected final MultiValueMode mode; + protected final String functionName; public AbstractDistanceScoreFunction( double userSuppiedScale, double decay, double offset, DecayFunction func, - MultiValueMode mode + MultiValueMode mode, + @Nullable String functionName ) { super(CombineFunction.MULTIPLY); this.mode = mode; @@ -591,6 +636,7 @@ public AbstractDistanceScoreFunction( throw new IllegalArgumentException(FunctionScoreQueryBuilder.NAME + " : offset must be > 0.0"); } this.offset = offset; + this.functionName = functionName; } /** @@ -624,7 +670,7 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE return Explanation.match( (float) score(docId, subQueryScore.getValue().floatValue()), "Function for field " + getFieldName() + ":", - func.explainFunction(getDistanceString(ctx, docId), value, scale) + func.explainFunction(getDistanceString(ctx, docId), value, scale, functionName) ); } }; diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/ExponentialDecayFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/ExponentialDecayFunctionBuilder.java index 7f0a9c3a58d59..b78e75762fe11 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/ExponentialDecayFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/ExponentialDecayFunctionBuilder.java @@ -33,8 +33,10 @@ package org.opensearch.index.query.functionscore; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.lucene.search.function.Functions; import java.io.IOException; @@ -45,6 +47,10 @@ public class ExponentialDecayFunctionBuilder extends DecayFunctionBuilder scoreFunctionBuilder) { - this(new MatchAllQueryBuilder(), new FilterFunctionBuilder[] { new FilterFunctionBuilder(scoreFunctionBuilder) }); + this(scoreFunctionBuilder, null); + } + + /** + * Creates a function_score query that will execute the function provided on all documents + * + * @param scoreFunctionBuilder score function that is executed + * @param queryName the query name + */ + public FunctionScoreQueryBuilder(ScoreFunctionBuilder scoreFunctionBuilder, @Nullable String queryName) { + this( + new MatchAllQueryBuilder().queryName(queryName), + new FilterFunctionBuilder[] { new FilterFunctionBuilder(scoreFunctionBuilder) } + ); } /** @@ -316,15 +340,17 @@ protected Query doToQuery(QueryShardContext context) throws IOException { int i = 0; for (FilterFunctionBuilder filterFunctionBuilder : filterFunctionBuilders) { ScoreFunction scoreFunction = filterFunctionBuilder.getScoreFunction().toFunction(context); - if 
(filterFunctionBuilder.getFilter().getName().equals(MatchAllQueryBuilder.NAME)) { + final QueryBuilder builder = filterFunctionBuilder.getFilter(); + if (builder.getName().equals(MatchAllQueryBuilder.NAME)) { filterFunctions[i++] = scoreFunction; } else { - Query filter = filterFunctionBuilder.getFilter().toQuery(context); - filterFunctions[i++] = new FunctionScoreQuery.FilterScoreFunction(filter, scoreFunction); + Query filter = builder.toQuery(context); + filterFunctions[i++] = new FunctionScoreQuery.FilterScoreFunction(filter, scoreFunction, builder.queryName()); } } - Query query = this.query.toQuery(context); + final QueryBuilder builder = this.query; + Query query = builder.toQuery(context); if (query == null) { query = new MatchAllDocsQuery(); } @@ -332,12 +358,12 @@ protected Query doToQuery(QueryShardContext context) throws IOException { CombineFunction boostMode = this.boostMode == null ? DEFAULT_BOOST_MODE : this.boostMode; // handle cases where only one score function and no filter was provided. In this case we create a FunctionScoreQuery. if (filterFunctions.length == 0) { - return new FunctionScoreQuery(query, minScore, maxBoost); + return new FunctionScoreQuery(query, builder.queryName(), minScore, maxBoost); } else if (filterFunctions.length == 1 && filterFunctions[0] instanceof FunctionScoreQuery.FilterScoreFunction == false) { - return new FunctionScoreQuery(query, filterFunctions[0], boostMode, minScore, maxBoost); + return new FunctionScoreQuery(query, builder.queryName(), filterFunctions[0], boostMode, minScore, maxBoost); } // in all other cases we create a FunctionScoreQuery with filters - return new FunctionScoreQuery(query, scoreMode, filterFunctions, boostMode, minScore, maxBoost); + return new FunctionScoreQuery(query, builder.queryName(), scoreMode, filterFunctions, boostMode, minScore, maxBoost); } /** @@ -606,6 +632,7 @@ private static String parseFiltersAndFunctions( QueryBuilder filter = null; ScoreFunctionBuilder scoreFunction = null; Float functionWeight = null; + String functionName = null; if (token != XContentParser.Token.START_OBJECT) { throw new ParsingException( parser.getTokenLocation(), @@ -635,6 +662,8 @@ private static String parseFiltersAndFunctions( } else if (token.isValue()) { if (WEIGHT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { functionWeight = parser.floatValue(); + } else if (NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + functionName = parser.text(); } else { throw new ParsingException( parser.getTokenLocation(), @@ -652,6 +681,10 @@ private static String parseFiltersAndFunctions( scoreFunction.setWeight(functionWeight); } } + + if (functionName != null && scoreFunction != null) { + scoreFunction.setFunctionName(functionName); + } } if (filter == null) { filter = new MatchAllQueryBuilder(); diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/GaussDecayFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/GaussDecayFunctionBuilder.java index c208083da08f5..ac6ae33cb4ed0 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/GaussDecayFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/GaussDecayFunctionBuilder.java @@ -33,9 +33,11 @@ package org.opensearch.index.query.functionscore; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; import org.opensearch.common.ParseField; import org.opensearch.common.bytes.BytesReference; import 
org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.lucene.search.function.Functions; import java.io.IOException; @@ -49,10 +51,25 @@ public GaussDecayFunctionBuilder(String fieldName, Object origin, Object scale, super(fieldName, origin, scale, offset); } + public GaussDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, @Nullable String functionName) { + super(fieldName, origin, scale, offset, functionName); + } + public GaussDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, double decay) { super(fieldName, origin, scale, offset, decay); } + public GaussDecayFunctionBuilder( + String fieldName, + Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { + super(fieldName, origin, scale, offset, decay, functionName); + } + GaussDecayFunctionBuilder(String fieldName, BytesReference functionBytes) { super(fieldName, functionBytes); } @@ -75,7 +92,6 @@ public DecayFunction getDecayFunction() { } private static final class GaussScoreFunction implements DecayFunction { - @Override public double evaluate(double value, double scale) { // note that we already computed scale^2 in processScale() so we do @@ -84,8 +100,11 @@ public double evaluate(double value, double scale) { } @Override - public Explanation explainFunction(String valueExpl, double value, double scale) { - return Explanation.match((float) evaluate(value, scale), "exp(-0.5*pow(" + valueExpl + ",2.0)/" + -1 * scale + ")"); + public Explanation explainFunction(String valueExpl, double value, double scale, @Nullable String functionName) { + return Explanation.match( + (float) evaluate(value, scale), + "exp(-0.5*pow(" + valueExpl + ",2.0)/" + -1 * scale + Functions.nameOrEmptyArg(functionName) + ")" + ); } @Override diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/LinearDecayFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/LinearDecayFunctionBuilder.java index 762757eb156e4..03102e45a41ba 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/LinearDecayFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/LinearDecayFunctionBuilder.java @@ -33,8 +33,10 @@ package org.opensearch.index.query.functionscore; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.lucene.search.function.Functions; import java.io.IOException; @@ -47,10 +49,25 @@ public LinearDecayFunctionBuilder(String fieldName, Object origin, Object scale, super(fieldName, origin, scale, offset); } + public LinearDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, @Nullable String functionName) { + super(fieldName, origin, scale, offset, functionName); + } + public LinearDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, double decay) { super(fieldName, origin, scale, offset, decay); } + public LinearDecayFunctionBuilder( + String fieldName, + Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { + super(fieldName, origin, scale, offset, decay, functionName); + } + LinearDecayFunctionBuilder(String fieldName, BytesReference functionBytes) { super(fieldName, functionBytes); } @@ -80,8 +97,11 @@ public double evaluate(double value, double scale) { } @Override - 
public Explanation explainFunction(String valueExpl, double value, double scale) { - return Explanation.match((float) evaluate(value, scale), "max(0.0, ((" + scale + " - " + valueExpl + ")/" + scale + ")"); + public Explanation explainFunction(String valueExpl, double value, double scale, @Nullable String functionName) { + return Explanation.match( + (float) evaluate(value, scale), + "max(0.0, ((" + scale + " - " + valueExpl + ")/" + scale + Functions.nameOrEmptyArg(functionName) + ")" + ); } @Override diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/RandomScoreFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/RandomScoreFunctionBuilder.java index 730be404feb14..26495c93082ae 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/RandomScoreFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/RandomScoreFunctionBuilder.java @@ -31,6 +31,7 @@ package org.opensearch.index.query.functionscore; +import org.opensearch.common.Nullable; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -58,6 +59,10 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder> implements ToXContentFragment, NamedWriteable { private Float weight; + private String functionName; /** * Standard empty constructor. @@ -58,11 +60,17 @@ public ScoreFunctionBuilder() {} */ public ScoreFunctionBuilder(StreamInput in) throws IOException { weight = checkWeight(in.readOptionalFloat()); + if (in.getVersion().onOrAfter(Version.V_1_3_0)) { + functionName = in.readOptionalString(); + } } @Override public final void writeTo(StreamOutput out) throws IOException { out.writeOptionalFloat(weight); + if (out.getVersion().onOrAfter(Version.V_1_3_0)) { + out.writeOptionalString(functionName); + } doWriteTo(out); } @@ -99,11 +107,30 @@ public final Float getWeight() { return weight; } + /** + * The name of this function + */ + public String getFunctionName() { + return functionName; + } + + /** + * Set the name of this function + */ + public void setFunctionName(String functionName) { + this.functionName = functionName; + } + @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (weight != null) { builder.field(FunctionScoreQueryBuilder.WEIGHT_FIELD.getPreferredName(), weight); } + + if (functionName != null) { + builder.field(FunctionScoreQueryBuilder.NAME_FIELD.getPreferredName(), functionName); + } + doXContent(builder, params); return builder; } @@ -128,7 +155,7 @@ public final boolean equals(Object obj) { } @SuppressWarnings("unchecked") FB other = (FB) obj; - return Objects.equals(weight, other.getWeight()) && doEquals(other); + return Objects.equals(weight, other.getWeight()) && Objects.equals(functionName, other.getFunctionName()) && doEquals(other); } /** @@ -139,7 +166,7 @@ public final boolean equals(Object obj) { @Override public final int hashCode() { - return Objects.hash(getClass(), weight, doHashCode()); + return Objects.hash(getClass(), weight, functionName, doHashCode()); } /** @@ -156,7 +183,7 @@ public final ScoreFunction toFunction(QueryShardContext context) throws IOExcept if (weight == null) { return scoreFunction; } - return new WeightFactorFunction(weight, scoreFunction); + return new WeightFactorFunction(weight, scoreFunction, getFunctionName()); } /** diff --git 
a/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilders.java b/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilders.java index 54dca40208c00..59d02e9381d7e 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilders.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilders.java @@ -32,6 +32,7 @@ package org.opensearch.index.query.functionscore; +import org.opensearch.common.Nullable; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; @@ -46,10 +47,29 @@ public static ExponentialDecayFunctionBuilder exponentialDecayFunction(String fi return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, null); } + public static ExponentialDecayFunctionBuilder exponentialDecayFunction( + String fieldName, + Object origin, + Object scale, + @Nullable String functionName + ) { + return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, null, functionName); + } + public static ExponentialDecayFunctionBuilder exponentialDecayFunction(String fieldName, Object origin, Object scale, Object offset) { return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, offset); } + public static ExponentialDecayFunctionBuilder exponentialDecayFunction( + String fieldName, + Object origin, + Object scale, + Object offset, + @Nullable String functionName + ) { + return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, offset, functionName); + } + public static ExponentialDecayFunctionBuilder exponentialDecayFunction( String fieldName, Object origin, @@ -60,10 +80,30 @@ public static ExponentialDecayFunctionBuilder exponentialDecayFunction( return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, offset, decay); } + public static ExponentialDecayFunctionBuilder exponentialDecayFunction( + String fieldName, + Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { + return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, offset, decay, functionName); + } + public static GaussDecayFunctionBuilder gaussDecayFunction(String fieldName, Object origin, Object scale) { return new GaussDecayFunctionBuilder(fieldName, origin, scale, null); } + public static GaussDecayFunctionBuilder gaussDecayFunction( + String fieldName, + Object origin, + Object scale, + @Nullable String functionName + ) { + return new GaussDecayFunctionBuilder(fieldName, origin, scale, null, functionName); + } + public static GaussDecayFunctionBuilder gaussDecayFunction(String fieldName, Object origin, Object scale, Object offset) { return new GaussDecayFunctionBuilder(fieldName, origin, scale, offset); } @@ -72,6 +112,26 @@ public static GaussDecayFunctionBuilder gaussDecayFunction(String fieldName, Obj return new GaussDecayFunctionBuilder(fieldName, origin, scale, offset, decay); } + public static GaussDecayFunctionBuilder gaussDecayFunction( + String fieldName, + Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { + return new GaussDecayFunctionBuilder(fieldName, origin, scale, offset, decay, functionName); + } + + public static LinearDecayFunctionBuilder linearDecayFunction( + String fieldName, + Object origin, + Object scale, + @Nullable String functionName + ) { + return new LinearDecayFunctionBuilder(fieldName, origin, scale, null, functionName); + } + public static LinearDecayFunctionBuilder linearDecayFunction(String fieldName, Object 
origin, Object scale) { return new LinearDecayFunctionBuilder(fieldName, origin, scale, null); } @@ -80,6 +140,16 @@ public static LinearDecayFunctionBuilder linearDecayFunction(String fieldName, O return new LinearDecayFunctionBuilder(fieldName, origin, scale, offset); } + public static LinearDecayFunctionBuilder linearDecayFunction( + String fieldName, + Object origin, + Object scale, + Object offset, + @Nullable String functionName + ) { + return new LinearDecayFunctionBuilder(fieldName, origin, scale, offset, functionName); + } + public static LinearDecayFunctionBuilder linearDecayFunction( String fieldName, Object origin, @@ -90,23 +160,54 @@ public static LinearDecayFunctionBuilder linearDecayFunction( return new LinearDecayFunctionBuilder(fieldName, origin, scale, offset, decay); } + public static LinearDecayFunctionBuilder linearDecayFunction( + String fieldName, + Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { + return new LinearDecayFunctionBuilder(fieldName, origin, scale, offset, decay, functionName); + } + public static ScriptScoreFunctionBuilder scriptFunction(Script script) { - return (new ScriptScoreFunctionBuilder(script)); + return scriptFunction(script, null); } public static ScriptScoreFunctionBuilder scriptFunction(String script) { - return (new ScriptScoreFunctionBuilder(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, script, emptyMap()))); + return scriptFunction(script, null); } public static RandomScoreFunctionBuilder randomFunction() { - return new RandomScoreFunctionBuilder(); + return randomFunction(null); } public static WeightBuilder weightFactorFunction(float weight) { - return (WeightBuilder) (new WeightBuilder().setWeight(weight)); + return weightFactorFunction(weight, null); } public static FieldValueFactorFunctionBuilder fieldValueFactorFunction(String fieldName) { - return new FieldValueFactorFunctionBuilder(fieldName); + return fieldValueFactorFunction(fieldName, null); + } + + public static ScriptScoreFunctionBuilder scriptFunction(Script script, @Nullable String functionName) { + return new ScriptScoreFunctionBuilder(script, functionName); + } + + public static ScriptScoreFunctionBuilder scriptFunction(String script, @Nullable String functionName) { + return new ScriptScoreFunctionBuilder(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, script, emptyMap()), functionName); + } + + public static RandomScoreFunctionBuilder randomFunction(@Nullable String functionName) { + return new RandomScoreFunctionBuilder(functionName); + } + + public static WeightBuilder weightFactorFunction(float weight, @Nullable String functionName) { + return (WeightBuilder) (new WeightBuilder(functionName).setWeight(weight)); + } + + public static FieldValueFactorFunctionBuilder fieldValueFactorFunction(String fieldName, @Nullable String functionName) { + return new FieldValueFactorFunctionBuilder(fieldName, functionName); } } diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/ScriptScoreFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/ScriptScoreFunctionBuilder.java index 8b6cbe3a1bafd..2701e5867edde 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/ScriptScoreFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/ScriptScoreFunctionBuilder.java @@ -32,6 +32,7 @@ package org.opensearch.index.query.functionscore; +import org.opensearch.common.Nullable; import 
org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -57,10 +58,15 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { */ public WeightBuilder() {} + /** + * Standard constructor. + */ + public WeightBuilder(@Nullable String functionName) { + setFunctionName(functionName); + } + /** * Read from a stream. */ diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index 2099df55bee3b..87984477e57e2 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -51,9 +51,9 @@ import org.apache.lucene.util.SetOnce; import org.apache.lucene.util.ThreadInterruptedException; import org.opensearch.Assertions; +import org.opensearch.ExceptionsHelper; import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchException; -import org.opensearch.ExceptionsHelper; import org.opensearch.action.ActionListener; import org.opensearch.action.ActionRunnable; import org.opensearch.action.admin.indices.flush.FlushRequest; @@ -73,6 +73,7 @@ import org.opensearch.common.CheckedRunnable; import org.opensearch.common.Nullable; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; @@ -1484,7 +1485,7 @@ public org.apache.lucene.util.Version minimumCompatibleVersion() { * * @param flushFirst true if the index should first be flushed to disk / a low level lucene commit should be executed */ - public Engine.IndexCommitRef acquireLastIndexCommit(boolean flushFirst) throws EngineException { + public GatedCloseable acquireLastIndexCommit(boolean flushFirst) throws EngineException { final IndexShardState state = this.state; // one time volatile read // we allow snapshot on closed index shard, since we want to do one after we close the shard and before we close the engine if (state == IndexShardState.STARTED || state == IndexShardState.CLOSED) { @@ -1529,7 +1530,7 @@ public void updateCurrentInfos(long gen, byte[] infosBytes, long seqNo) throws I * Snapshots the most recent safe index commit from the currently running engine. * All index files referenced by this index commit won't be freed until the commit/snapshot is closed. */ - public Engine.IndexCommitRef acquireSafeIndexCommit() throws EngineException { + public GatedCloseable acquireSafeIndexCommit() throws EngineException { final IndexShardState state = this.state; // one time volatile read // we allow snapshot on closed index shard, since we want to do one after we close the shard and before we close the engine if (state == IndexShardState.STARTED || state == IndexShardState.CLOSED) { @@ -1554,7 +1555,7 @@ public Engine.IndexCommitRef acquireSafeIndexCommit() throws EngineException { */ public Store.MetadataSnapshot snapshotStoreMetadata() throws IOException { assert Thread.holdsLock(mutex) == false : "snapshotting store metadata under mutex"; - Engine.IndexCommitRef indexCommit = null; + GatedCloseable wrappedIndexCommit = null; store.incRef(); try { synchronized (engineMutex) { @@ -1562,16 +1563,16 @@ public Store.MetadataSnapshot snapshotStoreMetadata() throws IOException { // the engine on us. 
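With the `functionName` plumbing added to `ScoreFunctionBuilders` and `ScriptScoreFunctionBuilder` above, a score function can carry a `_name` that is surfaced via `Functions.nameOrEmptyArg` in explanations and `toString` output. A small usage sketch (the script source and names are made up):

```
// Inline script score function tagged with a function _name.
ScriptScoreFunctionBuilder popularityBoost = ScoreFunctionBuilders.scriptFunction(
    new Script("doc['popularity'].value"),
    "popularity_boost"
);
```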
If the engine is running, we can get a snapshot via the deletion policy of the engine. final Engine engine = getEngineOrNull(); if (engine != null) { - indexCommit = engine.acquireLastIndexCommit(false); + wrappedIndexCommit = engine.acquireLastIndexCommit(false); } - if (indexCommit == null) { + if (wrappedIndexCommit == null) { return store.getMetadata(null, true); } } - return store.getMetadata(indexCommit.getIndexCommit()); + return store.getMetadata(wrappedIndexCommit.get()); } finally { store.decRef(); - IOUtils.close(indexCommit); + IOUtils.close(wrappedIndexCommit); } } @@ -2294,7 +2295,7 @@ public ShardPath shardPath() { } public void recoverFromLocalShards( - BiConsumer mappingUpdateConsumer, + Consumer mappingUpdateConsumer, List localShards, ActionListener listener ) throws IOException { @@ -3044,7 +3045,7 @@ public void startRecovery( PeerRecoveryTargetService peerRecoveryTargetService, PeerRecoveryTargetService.RecoveryListener recoveryListener, RepositoriesService repositoriesService, - BiConsumer mappingUpdateConsumer, + Consumer mappingUpdateConsumer, IndicesService indicesService ) { // TODO: Create a proper object to encapsulate the recovery context @@ -3312,7 +3313,7 @@ private EngineConfig newEngineConfig(LongSupplier globalCheckpointSupplier) { indexSettings.getMergePolicy(), mapperService != null ? mapperService.indexAnalyzer() : null, similarityService.similarity(mapperService), - codecService, + engineConfigFactory.newCodecServiceOrDefault(indexSettings, mapperService, logger, codecService), shardEventListener, indexCache != null ? indexCache.query() : null, cachingPolicy, @@ -4137,7 +4138,7 @@ void resetEngineToGlobalCheckpoint() throws IOException { true ) { @Override - public IndexCommitRef acquireLastIndexCommit(boolean flushFirst) { + public GatedCloseable acquireLastIndexCommit(boolean flushFirst) { synchronized (engineMutex) { if (newEngineReference.get() == null) { throw new AlreadyClosedException("engine was closed"); @@ -4148,7 +4149,7 @@ public IndexCommitRef acquireLastIndexCommit(boolean flushFirst) { } @Override - public IndexCommitRef acquireSafeIndexCommit() { + public GatedCloseable acquireSafeIndexCommit() { synchronized (engineMutex) { if (newEngineReference.get() == null) { throw new AlreadyClosedException("engine was closed"); diff --git a/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java b/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java index 148c39df070e8..98556db3ae138 100644 --- a/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java +++ b/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java @@ -32,6 +32,7 @@ package org.opensearch.index.shard; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; @@ -39,6 +40,7 @@ import org.apache.lucene.store.Lock; import org.apache.lucene.store.NoLockFactory; import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.index.Index; import org.opensearch.index.engine.Engine; import org.opensearch.index.store.Store; @@ -52,7 +54,7 @@ final class LocalShardSnapshot implements Closeable { private final IndexShard shard; private final Store store; - private final Engine.IndexCommitRef indexCommit; + private final GatedCloseable wrappedIndexCommit; private final AtomicBoolean closed = new AtomicBoolean(false); 
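The recovery path above narrows the mapping-update callback from `BiConsumer<String, MappingMetadata>` to `Consumer<MappingMetadata>`, since the mapping type no longer travels with the update. The consumer wired up by `IndicesService` (see its hunk later in this diff) then reduces to roughly the following sketch, with `client` and `shardRouting` assumed in scope:

```
Consumer<MappingMetadata> mappingUpdateConsumer = mapping -> client.admin()
    .indices()
    .preparePutMapping()
    .setConcreteIndex(shardRouting.index()) // concrete index - no name clash, it uses uuid
    .setSource(mapping.source().string(), XContentType.JSON)
    .get();
```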
LocalShardSnapshot(IndexShard shard) { @@ -61,7 +63,7 @@ final class LocalShardSnapshot implements Closeable { store.incRef(); boolean success = false; try { - indexCommit = shard.acquireLastIndexCommit(true); + wrappedIndexCommit = shard.acquireLastIndexCommit(true); success = true; } finally { if (success == false) { @@ -88,7 +90,7 @@ Directory getSnapshotDirectory() { return new FilterDirectory(store.directory()) { @Override public String[] listAll() throws IOException { - Collection fileNames = indexCommit.getIndexCommit().getFileNames(); + Collection fileNames = wrappedIndexCommit.get().getFileNames(); final String[] fileNameArray = fileNames.toArray(new String[fileNames.size()]); return fileNameArray; } @@ -143,7 +145,7 @@ public Set getPendingDeletions() throws IOException { public void close() throws IOException { if (closed.compareAndSet(false, true)) { try { - indexCommit.close(); + wrappedIndexCommit.close(); } finally { store.decRef(); } @@ -156,6 +158,6 @@ IndexMetadata getIndexMetadata() { @Override public String toString() { - return "local_shard_snapshot:[" + shard.shardId() + " indexCommit: " + indexCommit + "]"; + return "local_shard_snapshot:[" + shard.shardId() + " indexCommit: " + wrappedIndexCommit + "]"; } } diff --git a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java index 485d43d9a470f..6cf6ad645ca00 100644 --- a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java @@ -72,7 +72,7 @@ import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.stream.Collectors; import static org.opensearch.common.unit.TimeValue.timeValueMillis; @@ -116,9 +116,9 @@ void recoverFromStore(final IndexShard indexShard, ActionListener liste } void recoverFromLocalShards( - BiConsumer mappingUpdateConsumer, + Consumer mappingUpdateConsumer, IndexShard indexShard, - List shards, + final List shards, ActionListener listener ) { if (canRecover(indexShard)) { @@ -133,7 +133,7 @@ void recoverFromLocalShards( } IndexMetadata sourceMetadata = shards.get(0).getIndexMetadata(); for (ObjectObjectCursor mapping : sourceMetadata.getMappings()) { - mappingUpdateConsumer.accept(mapping.key, mapping.value); + mappingUpdateConsumer.accept(mapping.value); } indexShard.mapperService().merge(sourceMetadata, MapperService.MergeReason.MAPPING_RECOVERY); // now that the mapping is merged we can validate the index sort configuration. diff --git a/server/src/main/java/org/opensearch/index/store/Store.java b/server/src/main/java/org/opensearch/index/store/Store.java index 893d87e874b4a..e4896e2b81963 100644 --- a/server/src/main/java/org/opensearch/index/store/Store.java +++ b/server/src/main/java/org/opensearch/index/store/Store.java @@ -1628,27 +1628,16 @@ public void ensureIndexHasHistoryUUID() throws IOException { * commit on the replica will cause exception as the new last commit c3 will have recovery_translog_gen=1. The recovery * translog generation of a commit is calculated based on the current local checkpoint. The local checkpoint of c3 is 1 * while the local checkpoint of c2 is 2. - *

        - * 3. Commit without translog can be used in recovery. An old index, which was created before multiple-commits is introduced - * (v6.2), may not have a safe commit. If that index has a snapshotted commit without translog and an unsafe commit, - * the policy can consider the snapshotted commit as a safe commit for recovery even the commit does not have translog. */ - public void trimUnsafeCommits( - final long lastSyncedGlobalCheckpoint, - final long minRetainedTranslogGen, - final org.opensearch.Version indexVersionCreated - ) throws IOException { + public void trimUnsafeCommits(final Path translogPath) throws IOException { metadataLock.writeLock().lock(); try { final List existingCommits = DirectoryReader.listCommits(directory); - if (existingCommits.isEmpty()) { - throw new IllegalArgumentException("No index found to trim"); - } - final IndexCommit lastIndexCommitCommit = existingCommits.get(existingCommits.size() - 1); - final String translogUUID = lastIndexCommitCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY); - final IndexCommit startingIndexCommit; - // TODO: Asserts the starting commit is a safe commit once peer-recovery sets global checkpoint. - startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint); + assert existingCommits.isEmpty() == false : "No index found to trim"; + final IndexCommit lastIndexCommit = existingCommits.get(existingCommits.size() - 1); + final String translogUUID = lastIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY); + final long lastSyncedGlobalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID); + final IndexCommit startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint); if (translogUUID.equals(startingIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY)) == false) { throw new IllegalStateException( @@ -1659,7 +1648,7 @@ public void trimUnsafeCommits( + "]" ); } - if (startingIndexCommit.equals(lastIndexCommitCommit) == false) { + if (startingIndexCommit.equals(lastIndexCommit) == false) { try (IndexWriter writer = newAppendingIndexWriter(directory, startingIndexCommit)) { // this achieves two things: // - by committing a new commit based on the starting commit, it make sure the starting commit will be opened diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index 5bf0cd6a2faf7..2fb399b84153a 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -864,14 +864,13 @@ public IndexShard createShard( recoveryTargetService, recoveryListener, repositoriesService, - (type, mapping) -> { + mapping -> { assert recoveryState.getRecoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS : "mapping update consumer only required by local shards recovery"; client.admin() .indices() .preparePutMapping() .setConcreteIndex(shardRouting.index()) // concrete index - no name clash, it uses uuid - .setType(type) .setSource(mapping.source().string(), XContentType.JSON) .get(); }, diff --git a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java index 6c972d78772ca..30b1f058ea72e 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java +++ 
b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java @@ -222,7 +222,7 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi logger.trace("not running recovery with id [{}] - can not find it (probably finished)", recoveryId); return; } - final RecoveryTarget recoveryTarget = recoveryRef.target(); + final RecoveryTarget recoveryTarget = recoveryRef.get(); timer = recoveryTarget.state().getTimer(); cancellableThreads = recoveryTarget.cancellableThreads(); if (preExistingRequest == null) { @@ -363,7 +363,7 @@ public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, return; } - recoveryRef.target().prepareForTranslogOperations(request.totalTranslogOps(), listener); + recoveryRef.get().prepareForTranslogOperations(request.totalTranslogOps(), listener); } } } @@ -378,7 +378,7 @@ public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportCh return; } - recoveryRef.target().finalizeRecovery(request.globalCheckpoint(), request.trimAboveSeqNo(), listener); + recoveryRef.get().finalizeRecovery(request.globalCheckpoint(), request.trimAboveSeqNo(), listener); } } } @@ -389,7 +389,7 @@ class HandoffPrimaryContextRequestHandler implements TransportRequestHandler listener = createOrFinishListener( recoveryRef, channel, @@ -423,7 +423,7 @@ private void performTranslogOps( final ActionListener listener, final RecoveryRef recoveryRef ) { - final RecoveryTarget recoveryTarget = recoveryRef.target(); + final RecoveryTarget recoveryTarget = recoveryRef.get(); final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext()); final Consumer retryOnMappingException = exception -> { @@ -488,7 +488,7 @@ public void messageReceived(RecoveryFilesInfoRequest request, TransportChannel c return; } - recoveryRef.target() + recoveryRef.get() .receiveFileInfo( request.phase1FileNames, request.phase1FileSizes, @@ -511,7 +511,7 @@ public void messageReceived(RecoveryCleanFilesRequest request, TransportChannel return; } - recoveryRef.target() + recoveryRef.get() .cleanFiles(request.totalTranslogOps(), request.getGlobalCheckpoint(), request.sourceMetaSnapshot(), listener); } } @@ -525,7 +525,7 @@ class FileChunkTransportRequestHandler implements TransportRequestHandler listener = createOrFinishListener(recoveryRef, channel, Actions.FILE_CHUNK, request); if (listener == null) { return; @@ -575,7 +575,7 @@ private ActionListener createOrFinishListener( final RecoveryTransportRequest request, final CheckedFunction responseFn ) { - final RecoveryTarget recoveryTarget = recoveryRef.target(); + final RecoveryTarget recoveryTarget = recoveryRef.get(); final ActionListener channelListener = new ChannelActionListener<>(channel, action, request); final ActionListener voidListener = ActionListener.map(channelListener, responseFn); @@ -611,7 +611,7 @@ public void onFailure(Exception e) { logger.error(() -> new ParameterizedMessage("unexpected error during recovery [{}], failing shard", recoveryId), e); onGoingRecoveries.failRecovery( recoveryId, - new RecoveryFailedException(recoveryRef.target().state(), "unexpected error", e), + new RecoveryFailedException(recoveryRef.get().state(), "unexpected error", e), true // be safe ); } else { diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java b/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java index 0fa2bc29c09fc..3c197a8e33eb6 100644 --- 
a/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java @@ -36,6 +36,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.OpenSearchTimeoutException; import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.common.concurrent.GatedAutoCloseable; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.ConcurrentCollections; @@ -48,7 +49,6 @@ import java.util.Iterator; import java.util.List; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicBoolean; /** * This class holds a collection of all on going recoveries on the current node (i.e., the node is the target node @@ -178,7 +178,7 @@ public RecoveryRef getRecoverySafe(long id, ShardId shardId) { if (recoveryRef == null) { throw new IndexShardClosedException(shardId); } - assert recoveryRef.target().shardId().equals(shardId); + assert recoveryRef.get().shardId().equals(shardId); return recoveryRef; } @@ -273,29 +273,15 @@ public boolean cancelRecoveriesForShard(ShardId shardId, String reason) { * causes {@link RecoveryTarget#decRef()} to be called. This makes sure that the underlying resources * will not be freed until {@link RecoveryRef#close()} is called. */ - public static class RecoveryRef implements AutoCloseable { - - private final RecoveryTarget status; - private final AtomicBoolean closed = new AtomicBoolean(false); + public static class RecoveryRef extends GatedAutoCloseable { /** * Important: {@link RecoveryTarget#tryIncRef()} should * be *successfully* called on status before */ public RecoveryRef(RecoveryTarget status) { - this.status = status; - this.status.setLastAccessTime(); - } - - @Override - public void close() { - if (closed.compareAndSet(false, true)) { - status.decRef(); - } - } - - public RecoveryTarget target() { - return status; + super(status, status::decRef); + status.setLastAccessTime(); } } diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java index dcb7024ae8c75..7899b11330a34 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java @@ -57,6 +57,7 @@ import org.opensearch.common.StopWatch; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; import org.opensearch.common.logging.Loggers; @@ -64,11 +65,10 @@ import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CancellableThreads; -import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.util.concurrent.FutureUtils; import org.opensearch.common.util.concurrent.ListenableFuture; +import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.core.internal.io.IOUtils; -import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.RecoveryEngineException; import org.opensearch.index.seqno.ReplicationTracker; import org.opensearch.index.seqno.RetentionLease; @@ -250,10 +250,10 @@ && 
isTargetSameHistory() sendFileStep.onResponse(SendFileResult.EMPTY); } } else { - final Engine.IndexCommitRef safeCommitRef; + final GatedCloseable wrappedSafeCommit; try { - safeCommitRef = acquireSafeCommit(shard); - resources.add(safeCommitRef); + wrappedSafeCommit = acquireSafeCommit(shard); + resources.add(wrappedSafeCommit); } catch (final Exception e) { throw new RecoveryEngineException(shard.shardId(), 1, "snapshot failed", e); } @@ -268,16 +268,16 @@ && isTargetSameHistory() // advances and not when creating a new safe commit. In any case this is a best-effort thing since future recoveries can // always fall back to file-based ones, and only really presents a problem if this primary fails before things have settled // down. - startingSeqNo = Long.parseLong(safeCommitRef.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1L; + startingSeqNo = Long.parseLong(wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1L; logger.trace("performing file-based recovery followed by history replay starting at [{}]", startingSeqNo); try { final int estimateNumOps = estimateNumberOfHistoryOperations(startingSeqNo); final Releasable releaseStore = acquireStore(shard.store()); resources.add(releaseStore); - sendFileStep.whenComplete(r -> IOUtils.close(safeCommitRef, releaseStore), e -> { + sendFileStep.whenComplete(r -> IOUtils.close(wrappedSafeCommit, releaseStore), e -> { try { - IOUtils.close(safeCommitRef, releaseStore); + IOUtils.close(wrappedSafeCommit, releaseStore); } catch (final IOException ex) { logger.warn("releasing snapshot caused exception", ex); } @@ -307,7 +307,7 @@ && isTargetSameHistory() deleteRetentionLeaseStep.whenComplete(ignored -> { assert Transports.assertNotTransportThread(RecoverySourceHandler.this + "[phase1]"); - phase1(safeCommitRef.getIndexCommit(), startingSeqNo, () -> estimateNumOps, sendFileStep); + phase1(wrappedSafeCommit.get(), startingSeqNo, () -> estimateNumOps, sendFileStep); }, onFailure); } catch (final Exception e) { @@ -467,12 +467,12 @@ private Releasable acquireStore(Store store) { * with the file systems due to interrupt (see {@link org.apache.lucene.store.NIOFSDirectory} javadocs for more detail). * This method acquires a safe commit and wraps it to make sure that it will be released using the generic thread pool. 
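The wrapping described in that comment boils down to nesting one `GatedCloseable` inside another whose close action hands the real release to a different executor; the hunk that follows shows the actual implementation, which defers to the generic thread pool. A generic sketch of the same pattern, with the executor, method name, and error handling as assumptions:

```java
// Sketch of deferring a GatedCloseable's release to another executor. The AtomicBoolean
// guard keeps the deferred close idempotent; only the two-argument GatedCloseable
// constructor is taken from the change below, everything else is illustrative.
static GatedCloseable<IndexCommit> deferClose(GatedCloseable<IndexCommit> inner, Executor executor) {
    final AtomicBoolean closed = new AtomicBoolean(false);
    return new GatedCloseable<>(inner.get(), () -> {
        if (closed.compareAndSet(false, true)) {
            executor.execute(() -> {
                try {
                    inner.close();      // best-effort release off the calling thread
                } catch (IOException e) {
                    // swallow: the snapshot is already being abandoned
                }
            });
        }
    });
}
```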
*/ - private Engine.IndexCommitRef acquireSafeCommit(IndexShard shard) { - final Engine.IndexCommitRef commitRef = shard.acquireSafeIndexCommit(); + private GatedCloseable acquireSafeCommit(IndexShard shard) { + final GatedCloseable wrappedSafeCommit = shard.acquireSafeIndexCommit(); final AtomicBoolean closed = new AtomicBoolean(false); - return new Engine.IndexCommitRef(commitRef.getIndexCommit(), () -> { + return new GatedCloseable<>(wrappedSafeCommit.get(), () -> { if (closed.compareAndSet(false, true)) { - runWithGenericThreadPool(commitRef::close); + runWithGenericThreadPool(wrappedSafeCommit::close); } }); } diff --git a/server/src/main/java/org/opensearch/plugins/EnginePlugin.java b/server/src/main/java/org/opensearch/plugins/EnginePlugin.java index ee285e8be8c2f..4c3a07d7b98d9 100644 --- a/server/src/main/java/org/opensearch/plugins/EnginePlugin.java +++ b/server/src/main/java/org/opensearch/plugins/EnginePlugin.java @@ -34,6 +34,7 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; +import org.opensearch.index.codec.CodecServiceFactory; import org.opensearch.index.engine.EngineFactory; import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.index.translog.TranslogDeletionPolicy; @@ -63,11 +64,26 @@ public interface EnginePlugin { * to determine if a custom {@link CodecService} should be provided for the given index. A plugin that is not overriding * the {@link CodecService} through the plugin can ignore this method and the Codec specified in the {@link IndexSettings} * will be used. + * + * @deprecated Please use {@code getCustomCodecServiceFactory()} instead as it provides more context for {@link CodecService} + * instance construction. */ + @Deprecated default Optional getCustomCodecService(IndexSettings indexSettings) { return Optional.empty(); } + /** + * EXPERT: + * When an index is created this method is invoked for each engine plugin. Engine plugins can inspect the index settings + * to determine if a custom {@link CodecServiceFactory} should be provided for the given index. A plugin that is not overriding + * the {@link CodecServiceFactory} through the plugin can ignore this method and the default Codec specified in the + * {@link IndexSettings} will be used. + */ + default Optional getCustomCodecServiceFactory(IndexSettings indexSettings) { + return Optional.empty(); + } + /** * When an index is created this method is invoked for each engine plugin. Engine plugins that need to provide a * custom {@link TranslogDeletionPolicy} can override this method to return a function that takes the {@link IndexSettings} diff --git a/server/src/main/java/org/opensearch/rest/BaseRestHandler.java b/server/src/main/java/org/opensearch/rest/BaseRestHandler.java index f2e345314ee10..4ee209111bdcb 100644 --- a/server/src/main/java/org/opensearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/opensearch/rest/BaseRestHandler.java @@ -80,13 +80,6 @@ public abstract class BaseRestHandler implements RestHandler { @Deprecated protected Logger logger = LogManager.getLogger(getClass()); - /** - * Parameter that controls whether certain REST apis should include type names in their requests or responses. - * Note: Support for this parameter will be removed after the transition period to typeless APIs. 
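Alongside the commit-wrapping changes, `EnginePlugin` gains a factory-based codec extension point. A plugin opting into `getCustomCodecServiceFactory` might look roughly like the sketch below; only the two `EnginePlugin` method names come from this change, while the lambda shape of `CodecServiceFactory` and the `MyCodecService` class are assumptions:

```java
// Illustrative only: MyCodecService and the config-driven construction are hypothetical;
// the deprecated getCustomCodecService default is left untouched.
public class MyEnginePlugin extends Plugin implements EnginePlugin {

    @Override
    public Optional<CodecServiceFactory> getCustomCodecServiceFactory(IndexSettings indexSettings) {
        // Decide per index whether to supply a custom codec service.
        return Optional.of(config -> new MyCodecService(config));
    }

    @Override
    public Optional<EngineFactory> getEngineFactory(IndexSettings indexSettings) {
        return Optional.empty(); // keep the default engine
    }
}
```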
- */ - public static final String INCLUDE_TYPE_NAME_PARAMETER = "include_type_name"; - public static final boolean DEFAULT_INCLUDE_TYPE_NAME_POLICY = false; - public final long getUsageCount() { return usageCount.sum(); } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java index 6cc72388758a2..5b628bc094c41 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java @@ -35,7 +35,6 @@ import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.action.support.ActiveShardCount; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.index.mapper.MapperService; @@ -53,9 +52,6 @@ import static org.opensearch.rest.RestRequest.Method.PUT; public class RestCreateIndexAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCreateIndexAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in create " - + "index requests is deprecated. The parameter will be removed in the next major version."; @Override public List routes() { @@ -69,17 +65,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("create_index_with_types", TYPES_DEPRECATION_MESSAGE); - } - CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index")); if (request.hasContent()) { Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - sourceAsMap = prepareMappings(sourceAsMap, includeTypeName); + sourceAsMap = prepareMappings(sourceAsMap); createIndexRequest.source(sourceAsMap, LoggingDeprecationHandler.INSTANCE); } @@ -89,8 +79,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC return channel -> client.admin().indices().create(createIndexRequest, new RestToXContentListener<>(channel)); } - static Map prepareMappings(Map source, boolean includeTypeName) { - if (includeTypeName || source.containsKey("mappings") == false || (source.get("mappings") instanceof Map) == false) { + static Map prepareMappings(Map source) { + if (source.containsKey("mappings") == false || (source.get("mappings") instanceof Map) == false) { return source; } @@ -99,12 +89,7 @@ static Map prepareMappings(Map source, boolean i @SuppressWarnings("unchecked") Map mappings = (Map) source.get("mappings"); if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { - throw new IllegalArgumentException( - "The mapping definition cannot be nested under a type " - + "[" - + MapperService.SINGLE_MAPPING_NAME - + "] unless include_type_name is set to true." 
- ); + throw new IllegalArgumentException("The mapping definition cannot be nested under a type"); } newSource.put("mappings", singletonMap(MapperService.SINGLE_MAPPING_NAME, mappings)); diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java index 8b04e0b66dfae..d0610d790999b 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -60,11 +60,8 @@ import static org.opensearch.rest.RestStatus.OK; public class RestGetFieldMappingAction extends BaseRestHandler { - private static final Logger logger = LogManager.getLogger(RestGetFieldMappingAction.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(logger.getName()); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get " - + "field mapping requests is deprecated. The parameter will be removed in the next major version."; @Override public List routes() { @@ -85,14 +82,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC getMappingsRequest.indices(indices).fields(fields).includeDefaults(request.paramAsBoolean("include_defaults", false)); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (includeTypeName) { - throw new IllegalArgumentException(INCLUDE_TYPE_NAME_PARAMETER + " no longer supports the value [true]."); - } - deprecationLogger.deprecate("get_field_mapping_with_types", TYPES_DEPRECATION_MESSAGE); - } - if (request.hasParam("local")) { deprecationLogger.deprecate( "get_field_mapping_local", @@ -117,7 +106,7 @@ public RestResponse buildResponse(GetFieldMappingsResponse response, XContentBui status = NOT_FOUND; } response.toXContent(builder, request); - return new BytesRestResponse(RestStatus.OK, builder); + return new BytesRestResponse(status, builder); } }); } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java index de985cb9c372b..71e7ed098cf8d 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java @@ -36,16 +36,13 @@ import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.set.Sets; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestStatus; import org.opensearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Set; @@ -61,13 +58,6 @@ */ public class RestGetIndexTemplateAction extends BaseRestHandler { - private static final Set RESPONSE_PARAMETERS = Collections.unmodifiableSet( - 
Sets.union(Collections.singleton(INCLUDE_TYPE_NAME_PARAMETER), Settings.FORMAT_PARAMS) - ); - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetIndexTemplateAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" - + " Specifying include_type_name in get index template requests is deprecated."; - @Override public List routes() { return unmodifiableList( @@ -85,9 +75,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC final String[] names = Strings.splitStringByCommaToArray(request.param("name")); final GetIndexTemplatesRequest getIndexTemplatesRequest = new GetIndexTemplatesRequest(names); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("get_index_template_include_type_name", TYPES_DEPRECATION_MESSAGE); - } getIndexTemplatesRequest.local(request.paramAsBoolean("local", getIndexTemplatesRequest.local())); getIndexTemplatesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getIndexTemplatesRequest.masterNodeTimeout())); @@ -106,7 +93,7 @@ protected RestStatus getStatus(final GetIndexTemplatesResponse response) { @Override protected Set responseParams() { - return RESPONSE_PARAMETERS; + return Settings.FORMAT_PARAMS; } } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java index 0647221c8b6a0..37c8162c6d31b 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java @@ -36,18 +36,14 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.settings.Settings; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; import static java.util.Arrays.asList; import static java.util.Collections.unmodifiableList; @@ -59,15 +55,6 @@ */ public class RestGetIndicesAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetIndicesAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using `include_type_name` in get indices requests" - + " is deprecated. 
The parameter will be removed in the next major version."; - - private static final Set allowedResponseParameters = Collections.unmodifiableSet( - Stream.concat(Collections.singleton(INCLUDE_TYPE_NAME_PARAMETER).stream(), Settings.FORMAT_PARAMS.stream()) - .collect(Collectors.toSet()) - ); - @Override public List routes() { return unmodifiableList(asList(new Route(GET, "/{index}"), new Route(HEAD, "/{index}"))); @@ -81,10 +68,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - // starting with 7.0 we don't include types by default in the response to GET requests - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER) && request.method().equals(GET)) { - deprecationLogger.deprecate("get_indices_with_types", TYPES_DEPRECATION_MESSAGE); - } final GetIndexRequest getIndexRequest = new GetIndexRequest(); getIndexRequest.indices(indices); getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions())); @@ -101,6 +84,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC */ @Override protected Set responseParams() { - return allowedResponseParameters; + return Settings.FORMAT_PARAMS; } } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java index f196eb4e41d6d..f4f33905408e7 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java @@ -32,8 +32,6 @@ package org.opensearch.rest.action.admin.indices; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchTimeoutException; import org.opensearch.action.ActionRunnable; import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -41,7 +39,6 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.rest.BaseRestHandler; @@ -61,10 +58,6 @@ import static org.opensearch.rest.RestRequest.Method.GET; public class RestGetMappingAction extends BaseRestHandler { - private static final Logger logger = LogManager.getLogger(RestGetMappingAction.class); - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(logger.getName()); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get" - + " mapping requests is deprecated. 
The parameter will be removed in the next major version."; private final ThreadPool threadPool; diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java index da89691c60c9d..f17ac495b494b 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java @@ -53,11 +53,7 @@ import static org.opensearch.rest.RestRequest.Method.PUT; public class RestPutIndexTemplateAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutIndexTemplateAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" - + " Specifying include_type_name in put index template requests is deprecated." - + " The parameter will be removed in the next major version."; @Override public List routes() { @@ -71,12 +67,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest(request.param("name")); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("put_index_template_with_types", TYPES_DEPRECATION_MESSAGE); - } if (request.hasParam("template")) { deprecationLogger.deprecate( "put_index_template_deprecated_parameter", @@ -92,7 +83,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC putRequest.cause(request.param("cause", "")); Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - sourceAsMap = RestCreateIndexAction.prepareMappings(sourceAsMap, includeTypeName); + sourceAsMap = RestCreateIndexAction.prepareMappings(sourceAsMap); putRequest.source(sourceAsMap); return channel -> client.admin().indices().putTemplate(putRequest, new RestToXContentListener<>(channel)); diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java index 5da0b016c867d..f65dea1ebe3d2 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java @@ -36,7 +36,6 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.BaseRestHandler; @@ -54,9 +53,6 @@ import static org.opensearch.rest.RestRequest.Method.PUT; public class RestPutMappingAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutMappingAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in put " - + "mapping requests is deprecated. 
The parameter will be removed in the next major version."; @Override public List routes() { @@ -79,17 +75,9 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index"))); - - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("put_mapping_with_types", TYPES_DEPRECATION_MESSAGE); - } - - putMappingRequest.type(MapperService.SINGLE_MAPPING_NAME); - Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - if (includeTypeName == false && MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, sourceAsMap)) { + if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, sourceAsMap)) { throw new IllegalArgumentException("Types cannot be provided in put mapping requests"); } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java index bfa34b1bea763..08b84cc6fe6cc 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -65,12 +65,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { + if (request.hasParam("include_type_name")) { deprecationLogger.deprecate("index_rollover_with_types", TYPES_DEPRECATION_MESSAGE); } RolloverRequest rolloverIndexRequest = new RolloverRequest(request.param("index"), request.param("new_index")); - request.applyContentParser(parser -> rolloverIndexRequest.fromXContent(includeTypeName, parser)); + request.applyContentParser(parser -> rolloverIndexRequest.fromXContent(parser)); rolloverIndexRequest.dryRun(request.paramAsBoolean("dry_run", false)); rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout())); rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout())); diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestValidateQueryAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestValidateQueryAction.java index d40712e9d2ca2..bf300d5395b79 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestValidateQueryAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestValidateQueryAction.java @@ -39,7 +39,6 @@ import org.opensearch.client.node.NodeClient; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; @@ -58,9 +57,6 @@ import static org.opensearch.rest.RestStatus.OK; public class RestValidateQueryAction extends BaseRestHandler { - private static final 
DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestValidateQueryAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in validate query requests is deprecated."; - @Override public List routes() { return unmodifiableList( @@ -68,9 +64,7 @@ public List routes() { new Route(GET, "/_validate/query"), new Route(POST, "/_validate/query"), new Route(GET, "/{index}/_validate/query"), - new Route(POST, "/{index}/_validate/query"), - new Route(GET, "/{index}/{type}/_validate/query"), - new Route(POST, "/{index}/{type}/_validate/query") + new Route(POST, "/{index}/_validate/query") ) ); } @@ -86,11 +80,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC validateQueryRequest.indicesOptions(IndicesOptions.fromRequest(request, validateQueryRequest.indicesOptions())); validateQueryRequest.explain(request.paramAsBoolean("explain", false)); - if (request.hasParam("type")) { - deprecationLogger.deprecate("validate_query_with_types", TYPES_DEPRECATION_MESSAGE); - validateQueryRequest.types(Strings.splitStringByCommaToArray(request.param("type"))); - } - validateQueryRequest.rewrite(request.paramAsBoolean("rewrite", false)); validateQueryRequest.allShards(request.paramAsBoolean("all_shards", false)); diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestMultiTermVectorsAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestMultiTermVectorsAction.java index 2c52e75dc47b3..66b0c004b9fb4 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestMultiTermVectorsAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestMultiTermVectorsAction.java @@ -36,7 +36,6 @@ import org.opensearch.action.termvectors.TermVectorsRequest; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; @@ -50,8 +49,6 @@ import static org.opensearch.rest.RestRequest.Method.POST; public class RestMultiTermVectorsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestTermVectorsAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " + "Specifying types in multi term vector requests is deprecated."; @Override public List routes() { @@ -60,10 +57,7 @@ public List routes() { new Route(GET, "/_mtermvectors"), new Route(POST, "/_mtermvectors"), new Route(GET, "/{index}/_mtermvectors"), - new Route(POST, "/{index}/_mtermvectors"), - // Deprecated typed endpoints. 
- new Route(GET, "/{index}/{type}/_mtermvectors"), - new Route(POST, "/{index}/{type}/_mtermvectors") + new Route(POST, "/{index}/_mtermvectors") ) ); } diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestTermVectorsAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestTermVectorsAction.java index 36f9e43e71362..727b90474082e 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestTermVectorsAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestTermVectorsAction.java @@ -66,12 +66,7 @@ public List routes() { new Route(GET, "/{index}/_termvectors"), new Route(POST, "/{index}/_termvectors"), new Route(GET, "/{index}/_termvectors/{id}"), - new Route(POST, "/{index}/_termvectors/{id}"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_termvectors"), - new Route(POST, "/{index}/{type}/_termvectors"), - new Route(GET, "/{index}/{type}/{id}/_termvectors"), - new Route(POST, "/{index}/{type}/{id}/_termvectors") + new Route(POST, "/{index}/_termvectors/{id}") ) ); } diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java index 04ee5fdd5b621..6cb00633de441 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java @@ -66,10 +66,7 @@ public List routes() { new Route(GET, "/_count"), new Route(POST, "/_count"), new Route(GET, "/{index}/_count"), - new Route(POST, "/{index}/_count"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_count"), - new Route(POST, "/{index}/{type}/_count") + new Route(POST, "/{index}/_count") ) ); } diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestMultiSearchAction.java index 8c711b31d0720..8262f8199a704 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestMultiSearchAction.java @@ -91,10 +91,7 @@ public List routes() { new Route(GET, "/_msearch"), new Route(POST, "/_msearch"), new Route(GET, "/{index}/_msearch"), - new Route(POST, "/{index}/_msearch"), - // Deprecated typed endpoints. 
- new Route(GET, "/{index}/{type}/_msearch"), - new Route(POST, "/{index}/{type}/_msearch") + new Route(POST, "/{index}/_msearch") ) ); } diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java index e0c984cec5430..b84200407462f 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java @@ -42,7 +42,6 @@ import org.opensearch.common.Booleans; import org.opensearch.common.Strings; import org.opensearch.common.io.stream.NamedWriteableRegistry; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.query.QueryBuilder; import org.opensearch.rest.BaseRestHandler; @@ -89,9 +88,6 @@ public class RestSearchAction extends BaseRestHandler { RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams); } - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestSearchAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in search requests is deprecated."; - @Override public String getName() { return "search_action"; @@ -104,10 +100,7 @@ public List routes() { new Route(GET, "/_search"), new Route(POST, "/_search"), new Route(GET, "/{index}/_search"), - new Route(POST, "/{index}/_search"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_search"), - new Route(POST, "/{index}/{type}/_search") + new Route(POST, "/{index}/_search") ) ); } diff --git a/server/src/main/java/org/opensearch/script/ExplainableScoreScript.java b/server/src/main/java/org/opensearch/script/ExplainableScoreScript.java index fb7dd7ded501b..6ea3a322449e5 100644 --- a/server/src/main/java/org/opensearch/script/ExplainableScoreScript.java +++ b/server/src/main/java/org/opensearch/script/ExplainableScoreScript.java @@ -33,6 +33,7 @@ package org.opensearch.script; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; import java.io.IOException; @@ -49,7 +50,21 @@ public interface ExplainableScoreScript { * want to explain how that was computed. * * @param subQueryScore the Explanation for _score + * @deprecated please use {@code explain(Explanation subQueryScore, @Nullable String scriptName)} */ + @Deprecated Explanation explain(Explanation subQueryScore) throws IOException; + /** + * Build the explanation of the current document being scored + * The script score needs the Explanation of the sub query score because it might use _score and + * want to explain how that was computed. 
+ * + * @param subQueryScore the Explanation for _score + * @param scriptName the script name + */ + default Explanation explain(Explanation subQueryScore, @Nullable String scriptName) throws IOException { + return explain(subQueryScore); + } + } diff --git a/server/src/main/java/org/opensearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/opensearch/script/ScriptedMetricAggContexts.java index 7e85c5cdd725d..1f187f7f0e8f5 100644 --- a/server/src/main/java/org/opensearch/script/ScriptedMetricAggContexts.java +++ b/server/src/main/java/org/opensearch/script/ScriptedMetricAggContexts.java @@ -126,7 +126,7 @@ public Map getState() { return state; } - // Return the doc as a map (instead of LeafDocLookup) in order to abide by type whitelisting rules for + // Return the doc as a map (instead of LeafDocLookup) in order to abide by type allowlisting rules for // Painless scripts. public Map> getDoc() { return leafLookup == null ? null : leafLookup.doc(); diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java index 094c706053703..ac0baf18dfb55 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java @@ -163,7 +163,7 @@ public static > ConstructingO this.valuesField = clone.valuesField; } - AbstractPercentilesAggregationBuilder(StreamInput in) throws IOException { + AbstractPercentilesAggregationBuilder(StreamInput in, ParseField valuesField) throws IOException { super(in); values = in.readDoubleArray(); keyed = in.readBoolean(); @@ -175,6 +175,7 @@ public static > ConstructingO PercentilesMethod method = PercentilesMethod.readFromStream(in); percentilesConfig = PercentilesConfig.fromLegacy(method, compression, numberOfSignificantValueDigits); } + this.valuesField = valuesField; } @Override diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java index d8f3c80f36ff3..037830c63ecdf 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java @@ -82,7 +82,7 @@ private PercentileRanksAggregationBuilder(String name, double[] values, Percenti } public PercentileRanksAggregationBuilder(StreamInput in) throws IOException { - super(in); + super(in, VALUES_FIELD); } private PercentileRanksAggregationBuilder( diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java index 85e8d37d3fdba..323723bb6deb1 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java @@ -80,7 +80,7 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { } public PercentilesAggregationBuilder(StreamInput in) throws IOException { - super(in); + super(in, PERCENTS_FIELD); } public static AggregationBuilder parse(String 
aggregationName, XContentParser parser) throws IOException { diff --git a/server/src/main/java/org/opensearch/search/aggregations/pipeline/MovingFunctions.java b/server/src/main/java/org/opensearch/search/aggregations/pipeline/MovingFunctions.java index a9f3d06c0ea90..e3a57654f94cf 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/pipeline/MovingFunctions.java +++ b/server/src/main/java/org/opensearch/search/aggregations/pipeline/MovingFunctions.java @@ -192,7 +192,7 @@ public static double holt(double[] values, double alpha, double beta) { } /** - * Version of holt that can "forecast", not exposed as a whitelisted function for moving_fn scripts, but + * Version of holt that can "forecast", not exposed as an allowlisted function for moving_fn scripts, but * here as compatibility/code sharing for existing moving_avg agg. Can be removed when moving_avg is gone. */ public static double[] holtForecast(double[] values, double alpha, double beta, int numForecasts) { @@ -268,7 +268,7 @@ public static double holtWinters(double[] values, double alpha, double beta, dou } /** - * Version of holt-winters that can "forecast", not exposed as a whitelisted function for moving_fn scripts, but + * Version of holt-winters that can "forecast", not exposed as an allowlisted function for moving_fn scripts, but * here as compatibility/code sharing for existing moving_avg agg. Can be removed when moving_avg is gone. */ public static double[] holtWintersForecast( diff --git a/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java index 3b765cf179821..b6c0b63efe3d3 100644 --- a/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java @@ -50,6 +50,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; import org.opensearch.core.internal.io.IOUtils; @@ -368,25 +369,25 @@ private void snapshot( } final Repository repository = repositoriesService.repository(snapshot.getRepository()); - Engine.IndexCommitRef snapshotRef = null; + GatedCloseable wrappedSnapshot = null; try { // we flush first to make sure we get the latest writes snapshotted - snapshotRef = indexShard.acquireLastIndexCommit(true); - final IndexCommit snapshotIndexCommit = snapshotRef.getIndexCommit(); + wrappedSnapshot = indexShard.acquireLastIndexCommit(true); + final IndexCommit snapshotIndexCommit = wrappedSnapshot.get(); repository.snapshotShard( indexShard.store(), indexShard.mapperService(), snapshot.getSnapshotId(), indexId, - snapshotRef.getIndexCommit(), + wrappedSnapshot.get(), getShardStateId(indexShard, snapshotIndexCommit), snapshotStatus, version, userMetadata, - ActionListener.runBefore(listener, snapshotRef::close) + ActionListener.runBefore(listener, wrappedSnapshot::close) ); } catch (Exception e) { - IOUtils.close(snapshotRef); + IOUtils.close(wrappedSnapshot); throw e; } } catch (Exception e) { diff --git a/server/src/main/java/org/opensearch/tasks/TaskResultsService.java b/server/src/main/java/org/opensearch/tasks/TaskResultsService.java index 8767ce95d3352..60de452c3149e 100644 --- a/server/src/main/java/org/opensearch/tasks/TaskResultsService.java +++ 
b/server/src/main/java/org/opensearch/tasks/TaskResultsService.java @@ -146,7 +146,6 @@ public void onFailure(Exception e) { client.admin() .indices() .preparePutMapping(TASK_INDEX) - .setType(TASK_TYPE) .setSource(taskResultIndexMapping(), XContentType.JSON) .execute(ActionListener.delegateFailure(listener, (l, r) -> doStoreResult(taskResult, listener))); } else { @@ -169,7 +168,7 @@ private int getTaskResultMappingVersion(IndexMetadata metadata) { } private void doStoreResult(TaskResult taskResult, ActionListener listener) { - IndexRequestBuilder index = client.prepareIndex(TASK_INDEX, TASK_TYPE, taskResult.getTask().getTaskId().toString()); + IndexRequestBuilder index = client.prepareIndex(TASK_INDEX).setId(taskResult.getTask().getTaskId().toString()); try (XContentBuilder builder = XContentFactory.contentBuilder(Requests.INDEX_CONTENT_TYPE)) { taskResult.toXContent(builder, ToXContent.EMPTY_PARAMS); index.setSource(builder); diff --git a/server/src/test/java/org/opensearch/action/ShardValidateQueryRequestTests.java b/server/src/test/java/org/opensearch/action/ShardValidateQueryRequestTests.java index 2b6c5284546cf..726f77b0ddf13 100644 --- a/server/src/test/java/org/opensearch/action/ShardValidateQueryRequestTests.java +++ b/server/src/test/java/org/opensearch/action/ShardValidateQueryRequestTests.java @@ -69,17 +69,15 @@ public void testSerialize() throws IOException { validateQueryRequest.query(QueryBuilders.termQuery("field", "value")); validateQueryRequest.rewrite(true); validateQueryRequest.explain(false); - validateQueryRequest.types("type1", "type2"); ShardValidateQueryRequest request = new ShardValidateQueryRequest( new ShardId("index", "foobar", 1), - new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), new String[] { "alias0", "alias1" }), + new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), "alias0", "alias1"), validateQueryRequest ); request.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { ShardValidateQueryRequest readRequest = new ShardValidateQueryRequest(in); assertEquals(request.filteringAliases(), readRequest.filteringAliases()); - assertArrayEquals(request.types(), readRequest.types()); assertEquals(request.explain(), readRequest.explain()); assertEquals(request.query(), readRequest.query()); assertEquals(request.rewrite(), readRequest.rewrite()); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java index cfe3e9779314f..de69be636c327 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -35,11 +35,9 @@ import org.opensearch.OpenSearchParseException; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.common.Strings; -import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.XContentBuilder; @@ -47,16 +45,12 @@ import org.opensearch.common.xcontent.XContentParser; import 
org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.index.RandomCreateIndexGenerator; import org.opensearch.test.OpenSearchTestCase; -import org.opensearch.test.hamcrest.OpenSearchAssertions; import java.io.IOException; import java.util.Map; import java.util.Set; -import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.CoreMatchers.equalTo; public class CreateIndexRequestTests extends OpenSearchTestCase { @@ -101,36 +95,6 @@ public void testTopLevelKeys() { assertEquals("unknown key [FOO_SHOULD_BE_ILLEGAL_HERE] for create index", e.getMessage()); } - public void testToXContent() throws IOException { - CreateIndexRequest request = new CreateIndexRequest("foo"); - - String mapping; - if (randomBoolean()) { - mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("my_type").endObject().endObject()); - } else { - mapping = Strings.toString(JsonXContent.contentBuilder().startObject().endObject()); - } - request.mapping("my_type", mapping, XContentType.JSON); - - Alias alias = new Alias("test_alias"); - alias.routing("1"); - alias.filter("{\"term\":{\"year\":2016}}"); - alias.writeIndex(true); - request.alias(alias); - - Settings.Builder settings = Settings.builder(); - settings.put(SETTING_NUMBER_OF_SHARDS, 10); - request.settings(settings); - - String actualRequestBody = Strings.toString(request); - - String expectedRequestBody = "{\"settings\":{\"index\":{\"number_of_shards\":\"10\"}}," - + "\"mappings\":{\"my_type\":{\"my_type\":{}}}," - + "\"aliases\":{\"test_alias\":{\"filter\":{\"term\":{\"year\":2016}},\"routing\":\"1\",\"is_write_index\":true}}}"; - - assertEquals(expectedRequestBody, actualRequestBody); - } - public void testMappingKeyedByType() throws IOException { CreateIndexRequest request1 = new CreateIndexRequest("foo"); CreateIndexRequest request2 = new CreateIndexRequest("bar"); @@ -196,25 +160,6 @@ public void testMappingKeyedByType() throws IOException { } } - public void testToAndFromXContent() throws IOException { - - final CreateIndexRequest createIndexRequest = RandomCreateIndexGenerator.randomCreateIndexRequest(); - - boolean humanReadable = randomBoolean(); - final XContentType xContentType = randomFrom(XContentType.values()); - BytesReference originalBytes = toShuffledXContent(createIndexRequest, xContentType, EMPTY_PARAMS, humanReadable); - - CreateIndexRequest parsedCreateIndexRequest = new CreateIndexRequest(); - parsedCreateIndexRequest.source(originalBytes, xContentType); - - assertMappingsEqual(createIndexRequest.mappings(), parsedCreateIndexRequest.mappings()); - assertAliasesEqual(createIndexRequest.aliases(), parsedCreateIndexRequest.aliases()); - assertEquals(createIndexRequest.settings(), parsedCreateIndexRequest.settings()); - - BytesReference finalBytes = toShuffledXContent(parsedCreateIndexRequest, xContentType, EMPTY_PARAMS, humanReadable); - OpenSearchAssertions.assertToXContentEquivalent(originalBytes, finalBytes, xContentType); - } - public void testSettingsType() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject().startArray("settings").endArray().endObject(); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/get/GetIndexResponseTests.java 
b/server/src/test/java/org/opensearch/action/admin/indices/get/GetIndexResponseTests.java index 7b03a4bc7bc64..f712b93b409dc 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/get/GetIndexResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/get/GetIndexResponseTests.java @@ -41,26 +41,16 @@ import org.opensearch.common.io.stream.Writeable; import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.RandomCreateIndexGenerator; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.test.AbstractSerializingTestCase; +import org.opensearch.test.AbstractWireSerializingTestCase; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Locale; -import java.util.function.Predicate; -public class GetIndexResponseTests extends AbstractSerializingTestCase { - - @Override - protected GetIndexResponse doParseInstance(XContentParser parser) throws IOException { - return GetIndexResponse.fromXContent(parser); - } +public class GetIndexResponseTests extends AbstractWireSerializingTestCase { @Override protected Writeable.Reader instanceReader() { @@ -70,7 +60,7 @@ protected Writeable.Reader instanceReader() { @Override protected GetIndexResponse createTestInstance() { String[] indices = generateRandomStringArray(5, 5, false, false); - ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder settings = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder defaultSettings = ImmutableOpenMap.builder(); @@ -78,9 +68,7 @@ protected GetIndexResponse createTestInstance() { IndexScopedSettings indexScopedSettings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS; boolean includeDefaults = randomBoolean(); for (String index : indices) { - // rarely have no types - int typeCount = rarely() ? 0 : 1; - mappings.put(index, GetMappingsResponseTests.createMappingsForIndex(typeCount, true)); + mappings.put(index, GetMappingsResponseTests.createMappingsForIndex()); List aliasMetadataList = new ArrayList<>(); int aliasesNum = randomIntBetween(0, 3); @@ -111,19 +99,4 @@ protected GetIndexResponse createTestInstance() { dataStreams.build() ); } - - @Override - protected Predicate getRandomFieldsExcludeFilter() { - // we do not want to add new fields at the root (index-level), or inside the blocks - return f -> f.equals("") || f.contains(".settings") || f.contains(".defaults") || f.contains(".mappings") || f.contains(".aliases"); - } - - /** - * For xContent roundtrip testing we force the xContent output to still contain types because the parser still expects them. - * The new typeless parsing is implemented in the client side GetIndexResponse. 
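GetIndexResponseTests (like several response tests below) drops the xContent round trip and extends AbstractWireSerializingTestCase instead, so only transport serialization is exercised. A rough outline of what such a test is left with, using a hypothetical MyResponse class in place of the real ones:

```
// Sketch only: the two overrides that remain once the doParseInstance/fromXContent
// machinery is removed. MyResponse and its constructors are hypothetical.
public class MyResponseTests extends AbstractWireSerializingTestCase<MyResponse> {

    @Override
    protected Writeable.Reader<MyResponse> instanceReader() {
        return MyResponse::new;                        // StreamInput-based constructor
    }

    @Override
    protected MyResponse createTestInstance() {
        return new MyResponse(randomAlphaOfLength(5)); // random instance for round-tripping
    }
}
```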
- */ - @Override - protected ToXContent.Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "true")); - } } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java index 2c1ed98bbaeac..99e4b5a2cca89 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java @@ -37,22 +37,14 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.test.AbstractSerializingTestCase; +import org.opensearch.test.AbstractWireSerializingTestCase; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; -import java.util.function.Predicate; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.hamcrest.CoreMatchers.equalTo; - -public class GetFieldMappingsResponseTests extends AbstractSerializingTestCase { +public class GetFieldMappingsResponseTests extends AbstractWireSerializingTestCase { public void testManualSerialization() throws IOException { Map>> mappings = new HashMap<>(); @@ -71,51 +63,6 @@ public void testManualSerialization() throws IOException { } } - public void testManualJunkedJson() throws Exception { - // in fact random fields could be evaluated as proper mapping, while proper junk in this case is arrays and values - final String json = "{\"index1\":{\"mappings\":" - + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," - // junk here - + "\"junk1\": [\"field1\", {\"field2\":{}}]," - + "\"junk2\": [{\"field3\":{}}]," - + "\"junk3\": 42," - + "\"junk4\": \"Q\"," - + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}," - + "\"index0\":{\"mappings\":" - + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," - + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}}"; - - final XContentParser parser = XContentType.JSON.xContent() - .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, json.getBytes("UTF-8")); - - final GetFieldMappingsResponse response = GetFieldMappingsResponse.fromXContent(parser); - - FieldMappingMetadata fieldMappingMetadata = new FieldMappingMetadata("my field", new BytesArray("{\"type\":\"keyword\"}")); - Map fieldMapping = new HashMap<>(); - fieldMapping.put("field0", fieldMappingMetadata); - fieldMapping.put("field1", fieldMappingMetadata); - - Map> typeMapping = new HashMap<>(); - typeMapping.put("doctype0", fieldMapping); - 
typeMapping.put("doctype1", fieldMapping); - - Map>> mappings = new HashMap<>(); - mappings.put("index0", typeMapping); - mappings.put("index1", typeMapping); - - final Map>> responseMappings = response.mappings(); - assertThat(responseMappings, equalTo(mappings)); - } - - @Override - protected GetFieldMappingsResponse doParseInstance(XContentParser parser) throws IOException { - return GetFieldMappingsResponse.fromXContent(parser); - } - @Override protected GetFieldMappingsResponse createTestInstance() { return new GetFieldMappingsResponse(randomMapping()); @@ -126,23 +73,6 @@ protected Writeable.Reader instanceReader() { return GetFieldMappingsResponse::new; } - @Override - protected Predicate getRandomFieldsExcludeFilter() { - // allow random fields at the level of `index` and `index.mappings.doctype.field` - // otherwise random field could be evaluated as index name or type name - return s -> false == (s.matches("(?[^.]+)") - || s.matches("(?[^.]+)\\.mappings\\.(?[^.]+)\\.(?[^.]+)")); - } - - /** - * For xContent roundtrip testing we force the xContent output to still contain types because the parser - * still expects them. The new typeless parsing is implemented in the client side GetFieldMappingsResponse. - */ - @Override - protected ToXContent.Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap(INCLUDE_TYPE_NAME_PARAMETER, "true")); - } - private Map>> randomMapping() { Map>> mappings = new HashMap<>(); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java index 56dfbeffc21ac..5dd05789429bf 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java @@ -32,65 +32,35 @@ package org.opensearch.action.admin.indices.mapping.get; -import com.carrotsearch.hppc.cursors.ObjectCursor; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.ToXContent.Params; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.mapper.MapperService; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.test.AbstractSerializingTestCase; +import org.opensearch.test.AbstractWireSerializingTestCase; import org.opensearch.test.EqualsHashCodeTestUtils; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; -import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.Objects; -public class GetMappingsResponseTests extends AbstractSerializingTestCase { - - @Override - protected boolean supportsUnknownFields() { - return false; - } +public class GetMappingsResponseTests extends AbstractWireSerializingTestCase { public void testCheckEqualsAndHashCode() { GetMappingsResponse resp = createTestInstance(); EqualsHashCodeTestUtils.checkEqualsAndHashCode(resp, r -> new GetMappingsResponse(r.mappings()), GetMappingsResponseTests::mutate); } - @Override - protected GetMappingsResponse doParseInstance(XContentParser parser) throws IOException { - return GetMappingsResponse.fromXContent(parser); - } - @Override protected Writeable.Reader instanceReader() { 
return GetMappingsResponse::new; } - private static GetMappingsResponse mutate(GetMappingsResponse original) throws IOException { - ImmutableOpenMap.Builder> builder = ImmutableOpenMap.builder(original.mappings()); + private static GetMappingsResponse mutate(GetMappingsResponse original) { + ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(original.mappings()); String indexKey = original.mappings().keys().iterator().next().value; + builder.put(indexKey + "1", createMappingsForIndex()); - ImmutableOpenMap.Builder typeBuilder = ImmutableOpenMap.builder(original.mappings().get(indexKey)); - final String typeKey; - Iterator> iter = original.mappings().get(indexKey).keys().iterator(); - if (iter.hasNext()) { - typeKey = iter.next().value; - } else { - typeKey = "new-type"; - } - - typeBuilder.put(typeKey, new MappingMetadata("type-" + randomAlphaOfLength(6), randomFieldMapping())); - - builder.put(indexKey, typeBuilder.build()); return new GetMappingsResponse(builder.build()); } @@ -99,48 +69,23 @@ protected GetMappingsResponse mutateInstance(GetMappingsResponse instance) throw return mutate(instance); } - public static ImmutableOpenMap createMappingsForIndex(int typeCount, boolean randomTypeName) { - List typeMappings = new ArrayList<>(typeCount); - - for (int i = 0; i < typeCount; i++) { - if (rarely() == false) { // rarely have no fields - Map mappings = new HashMap<>(); - mappings.put("field-" + i, randomFieldMapping()); - if (randomBoolean()) { - mappings.put("field2-" + i, randomFieldMapping()); - } - - try { - String typeName = MapperService.SINGLE_MAPPING_NAME; - if (randomTypeName) { - typeName = "type-" + randomAlphaOfLength(5); - } - MappingMetadata mmd = new MappingMetadata(typeName, mappings); - typeMappings.add(mmd); - } catch (IOException e) { - fail("shouldn't have failed " + e); - } + public static MappingMetadata createMappingsForIndex() { + Map mappings = new HashMap<>(); + if (rarely() == false) { // rarely have no fields + mappings.put("field", randomFieldMapping()); + if (randomBoolean()) { + mappings.put("field2", randomFieldMapping()); } + String typeName = MapperService.SINGLE_MAPPING_NAME; + return new MappingMetadata(typeName, mappings); } - ImmutableOpenMap.Builder typeBuilder = ImmutableOpenMap.builder(); - typeMappings.forEach(mmd -> typeBuilder.put(mmd.type(), mmd)); - return typeBuilder.build(); - } - - /** - * For xContent roundtrip testing we force the xContent output to still contain types because the parser - * still expects them. The new typeless parsing is implemented in the client side GetMappingsResponse. - */ - @Override - protected Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "true")); + return new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, mappings); } @Override protected GetMappingsResponse createTestInstance() { - ImmutableOpenMap.Builder> indexBuilder = ImmutableOpenMap.builder(); - int typeCount = rarely() ? 
0 : 1; - indexBuilder.put("index-" + randomAlphaOfLength(5), createMappingsForIndex(typeCount, randomBoolean())); + ImmutableOpenMap.Builder indexBuilder = ImmutableOpenMap.builder(); + indexBuilder.put("index-" + randomAlphaOfLength(5), createMappingsForIndex()); GetMappingsResponse resp = new GetMappingsResponse(indexBuilder.build()); logger.debug("--> created: {}", resp); return resp; diff --git a/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index b45e7d1225017..fd6fc3b6839d7 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -66,13 +66,9 @@ public class PutMappingRequestTests extends OpenSearchTestCase { public void testValidation() { - PutMappingRequest r = new PutMappingRequest("myindex").type(""); + PutMappingRequest r = new PutMappingRequest("myindex"); ActionRequestValidationException ex = r.validate(); - assertNotNull("type validation should fail", ex); - assertTrue(ex.getMessage().contains("type is empty")); - r.type("mytype"); - ex = r.validate(); assertNotNull("source validation should fail", ex); assertTrue(ex.getMessage().contains("source is missing")); @@ -96,21 +92,20 @@ public void testValidation() { } /** - * Test that {@link PutMappingRequest#buildFromSimplifiedDef(String, Object...)} + * Test that {@link PutMappingRequest#buildFromSimplifiedDef(Object...)} * rejects inputs where the {@code Object...} varargs of field name and properties are not * paired correctly */ public void testBuildFromSimplifiedDef() { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> PutMappingRequest.buildFromSimplifiedDef("type", "only_field") + () -> PutMappingRequest.buildFromSimplifiedDef("only_field") ); assertEquals("mapping source must be pairs of fieldnames and properties definition.", e.getMessage()); } public void testToXContent() throws IOException { PutMappingRequest request = new PutMappingRequest("foo"); - request.type("my_type"); XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); mapping.startObject("properties"); @@ -128,7 +123,6 @@ public void testToXContent() throws IOException { public void testToXContentWithEmptySource() throws IOException { PutMappingRequest request = new PutMappingRequest("foo"); - request.type("my_type"); String actualRequestBody = Strings.toString(request); String expectedRequestBody = "{}"; @@ -166,10 +160,7 @@ private static PutMappingRequest createTestItem() throws IOException { String index = randomAlphaOfLength(5); PutMappingRequest request = new PutMappingRequest(index); - - String type = randomAlphaOfLength(5); - request.type(type); - request.source(RandomCreateIndexGenerator.randomMapping(type)); + request.source(RandomCreateIndexGenerator.randomMapping("_doc")); return request; } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java index 8e5c3d9f59a86..1e8dc2f031058 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -34,7 +34,6 @@ import 
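PutMappingRequestTests now builds requests without a type. A short sketch of the typeless request shape the test exercises, with an invented index name and mapping body:

```
// Illustrative only; the index name and mapping JSON are made up.
PutMappingRequest request = new PutMappingRequest("my-index");   // no .type(...) call anymore
request.source("{\"properties\":{\"title\":{\"type\":\"text\"}}}", XContentType.JSON);
assert request.validate() == null;                               // valid once a source is set
```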
org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.admin.indices.create.CreateIndexRequest; -import org.opensearch.action.admin.indices.create.CreateIndexRequestTests; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; @@ -54,7 +53,6 @@ import org.opensearch.indices.IndicesModule; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.XContentTestUtils; -import org.opensearch.test.hamcrest.OpenSearchAssertions; import java.io.IOException; import org.junit.Before; @@ -64,7 +62,6 @@ import java.util.Map; import java.util.function.Consumer; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.equalTo; public class RolloverRequestTests extends OpenSearchTestCase { @@ -87,7 +84,7 @@ public void testConditionsParsing() throws Exception { .field("max_size", "45gb") .endObject() .endObject(); - request.fromXContent(false, createParser(builder)); + request.fromXContent(createParser(builder)); Map> conditions = request.getConditions(); assertThat(conditions.size(), equalTo(3)); MaxAgeCondition maxAgeCondition = (MaxAgeCondition) conditions.get(MaxAgeCondition.NAME); @@ -107,7 +104,6 @@ public void testParsingWithIndexSettings() throws Exception { .field("max_docs", 100) .endObject() .startObject("mappings") - .startObject("type1") .startObject("properties") .startObject("field1") .field("type", "string") @@ -115,7 +111,6 @@ public void testParsingWithIndexSettings() throws Exception { .endObject() .endObject() .endObject() - .endObject() .startObject("settings") .field("number_of_shards", 10) .endObject() @@ -124,7 +119,7 @@ public void testParsingWithIndexSettings() throws Exception { .endObject() .endObject() .endObject(); - request.fromXContent(true, createParser(builder)); + request.fromXContent(createParser(builder)); Map> conditions = request.getConditions(); assertThat(conditions.size(), equalTo(2)); assertThat(request.getCreateIndexRequest().mappings().size(), equalTo(1)); @@ -145,8 +140,7 @@ public void testTypelessMappingParsing() throws Exception { .endObject() .endObject(); - boolean includeTypeName = false; - request.fromXContent(includeTypeName, createParser(builder)); + request.fromXContent(createParser(builder)); CreateIndexRequest createIndexRequest = request.getCreateIndexRequest(); String mapping = createIndexRequest.mappings().get(MapperService.SINGLE_MAPPING_NAME); @@ -182,27 +176,6 @@ public void testSerialize() throws Exception { } } - public void testToAndFromXContent() throws IOException { - RolloverRequest rolloverRequest = createTestItem(); - - final XContentType xContentType = randomFrom(XContentType.values()); - boolean humanReadable = randomBoolean(); - BytesReference originalBytes = toShuffledXContent(rolloverRequest, xContentType, EMPTY_PARAMS, humanReadable); - - RolloverRequest parsedRolloverRequest = new RolloverRequest(); - parsedRolloverRequest.fromXContent(true, createParser(xContentType.xContent(), originalBytes)); - - CreateIndexRequest createIndexRequest = rolloverRequest.getCreateIndexRequest(); - CreateIndexRequest parsedCreateIndexRequest = parsedRolloverRequest.getCreateIndexRequest(); - CreateIndexRequestTests.assertMappingsEqual(createIndexRequest.mappings(), parsedCreateIndexRequest.mappings()); - CreateIndexRequestTests.assertAliasesEqual(createIndexRequest.aliases(), parsedCreateIndexRequest.aliases()); - 
assertEquals(createIndexRequest.settings(), parsedCreateIndexRequest.settings()); - assertEquals(rolloverRequest.getConditions(), parsedRolloverRequest.getConditions()); - - BytesReference finalBytes = toShuffledXContent(parsedRolloverRequest, xContentType, EMPTY_PARAMS, humanReadable); - OpenSearchAssertions.assertToXContentEquivalent(originalBytes, finalBytes, xContentType); - } - public void testUnknownFields() throws IOException { final RolloverRequest request = new RolloverRequest(); XContentType xContentType = randomFrom(XContentType.values()); @@ -215,7 +188,7 @@ public void testUnknownFields() throws IOException { } builder.endObject(); BytesReference mutated = XContentTestUtils.insertRandomFields(xContentType, BytesReference.bytes(builder), null, random()); - expectThrows(XContentParseException.class, () -> request.fromXContent(false, createParser(xContentType.xContent(), mutated))); + expectThrows(XContentParseException.class, () -> request.fromXContent(createParser(xContentType.xContent(), mutated))); } public void testSameConditionCanOnlyBeAddedOnce() { @@ -244,8 +217,8 @@ public void testValidation() { private static RolloverRequest createTestItem() throws IOException { RolloverRequest rolloverRequest = new RolloverRequest(); if (randomBoolean()) { - String type = randomAlphaOfLengthBetween(3, 10); - rolloverRequest.getCreateIndexRequest().mapping(type, RandomCreateIndexGenerator.randomMapping(type)); + rolloverRequest.getCreateIndexRequest() + .mapping(MapperService.SINGLE_MAPPING_NAME, RandomCreateIndexGenerator.randomMapping(MapperService.SINGLE_MAPPING_NAME)); } if (randomBoolean()) { RandomCreateIndexGenerator.randomAliases(rolloverRequest.getCreateIndexRequest()); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java index 7a47f2575ae6a..ca3b1f3f3815d 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java @@ -65,7 +65,7 @@ public void setupIndex() { int numDocs = scaledRandomIntBetween(100, 1000); for (int j = 0; j < numDocs; ++j) { String id = Integer.toString(j); - client().prepareIndex("test", "type1", id).setSource("text", "sometext").get(); + client().prepareIndex("test").setId(id).setSource("text", "sometext").get(); } client().admin().indices().prepareFlush("test").get(); client().admin().indices().prepareRefresh().get(); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java b/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java index 0cf9f9fe152d6..6a84c5894fceb 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java @@ -92,7 +92,7 @@ public void testSegmentStats() throws Exception { .setSettings(Settings.builder().put("index.store.type", storeType.getSettingsKey())) ); ensureGreen("test"); - client().prepareIndex("test", "doc", "1").setSource("foo", "bar", "bar", "baz", "baz", 42).get(); + client().prepareIndex("test").setId("1").setSource("foo", "bar", "bar", "baz", "baz", 42).get(); client().admin().indices().prepareRefresh("test").get(); IndicesStatsResponse rsp = client().admin().indices().prepareStats("test").get(); @@ -101,7 +101,7 
@@ public void testSegmentStats() throws Exception { assertThat(stats.getCount(), greaterThan(0L)); // now check multiple segments stats are merged together - client().prepareIndex("test", "doc", "2").setSource("foo", "bar", "bar", "baz", "baz", 43).get(); + client().prepareIndex("test").setId("2").setSource("foo", "bar", "bar", "baz", "baz", 43).get(); client().admin().indices().prepareRefresh("test").get(); rsp = client().admin().indices().prepareStats("test").get(); @@ -129,7 +129,8 @@ public void testRefreshListeners() throws Exception { createIndex("test", Settings.builder().put("refresh_interval", -1).build()); // Index a document asynchronously so the request will only return when document is refreshed - ActionFuture index = client().prepareIndex("test", "test", "test") + ActionFuture index = client().prepareIndex("test") + .setId("test") .setSource("test", "test") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .execute(); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java index 6f30781ab9bbe..7f62861d4f332 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java @@ -32,31 +32,23 @@ package org.opensearch.action.admin.indices.template.get; -import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.IndexTemplateMetadata; +import org.opensearch.common.io.stream.Writeable; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.test.AbstractXContentTestCase; +import org.opensearch.test.AbstractWireSerializingTestCase; import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; import static org.hamcrest.Matchers.equalTo; -public class GetIndexTemplatesResponseTests extends AbstractXContentTestCase { - @Override - protected GetIndexTemplatesResponse doParseInstance(XContentParser parser) throws IOException { - return GetIndexTemplatesResponse.fromXContent(parser); - } +public class GetIndexTemplatesResponseTests extends AbstractWireSerializingTestCase { @Override protected GetIndexTemplatesResponse createTestInstance() { @@ -80,7 +72,7 @@ protected GetIndexTemplatesResponse createTestInstance() { } if (randomBoolean()) { try { - templateBuilder.putMapping("doc", "{\"doc\":{\"properties\":{\"type\":\"text\"}}}"); + templateBuilder.putMapping("doc", "{\"properties\":{\"type\":\"text\"}}"); } catch (IOException ex) { throw new UncheckedIOException(ex); } @@ -91,20 +83,8 @@ protected GetIndexTemplatesResponse createTestInstance() { } @Override - protected boolean supportsUnknownFields() { - // We can not inject anything at the top level because a GetIndexTemplatesResponse is serialized as a map - // from template name to template content. 
IndexTemplateMetadataTests already covers situations where we - // inject arbitrary things inside the IndexTemplateMetadata. - return false; - } - - /** - * For now, we only unit test the legacy typed responses. This will soon no longer be the case, - * as we introduce support for typeless xContent parsing in {@link GetFieldMappingsResponse}. - */ - @Override - protected ToXContent.Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap(INCLUDE_TYPE_NAME_PARAMETER, "true")); + protected Writeable.Reader instanceReader() { + return GetIndexTemplatesResponse::new; } @Override diff --git a/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java index 733d09126004b..a812dd2888e5d 100644 --- a/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java @@ -286,19 +286,12 @@ public void testExecuteBulkIndexRequestWithMappingUpdates() throws Exception { // Pretend the mappings haven't made it to the node yet BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); AtomicInteger updateCalled = new AtomicInteger(); - TransportShardBulkAction.executeBulkItemRequest( - context, - null, - threadPool::absoluteTimeInMillis, - (update, shardId, type, listener) -> { - // There should indeed be a mapping update - assertNotNull(update); - updateCalled.incrementAndGet(); - listener.onResponse(null); - }, - listener -> listener.onResponse(null), - ASSERTING_DONE_LISTENER - ); + TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis, (update, shardId, listener) -> { + // There should indeed be a mapping update + assertNotNull(update); + updateCalled.incrementAndGet(); + listener.onResponse(null); + }, listener -> listener.onResponse(null), ASSERTING_DONE_LISTENER); assertTrue(context.isInitial()); assertTrue(context.hasMoreOperationsToExecute()); @@ -315,7 +308,7 @@ public void testExecuteBulkIndexRequestWithMappingUpdates() throws Exception { context, null, threadPool::absoluteTimeInMillis, - (update, shardId, type, listener) -> fail("should not have had to update the mappings"), + (update, shardId, listener) -> fail("should not have had to update the mappings"), listener -> {}, ASSERTING_DONE_LISTENER ); @@ -989,7 +982,7 @@ public void testForceExecutionOnRejectionAfterMappingUpdate() throws Exception { shard, null, rejectingThreadPool::absoluteTimeInMillis, - (update, shardId, type, listener) -> { + (update, shardId, listener) -> { // There should indeed be a mapping update assertNotNull(update); updateCalled.incrementAndGet(); @@ -1090,7 +1083,7 @@ public Translog.Location getTranslogLocation() { /** Doesn't perform any mapping updates */ public static class NoopMappingUpdatePerformer implements MappingUpdatePerformer { @Override - public void updateMappings(Mapping update, ShardId shardId, String type, ActionListener listener) { + public void updateMappings(Mapping update, ShardId shardId, ActionListener listener) { listener.onResponse(null); } } @@ -1104,7 +1097,7 @@ private class ThrowingMappingUpdatePerformer implements MappingUpdatePerformer { } @Override - public void updateMappings(Mapping update, ShardId shardId, String type, ActionListener listener) { + public void updateMappings(Mapping update, ShardId shardId, ActionListener listener) { listener.onFailure(e); } } 
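The TransportShardBulkAction tests above now stub the mapping updater with a three-argument lambda, since MappingUpdatePerformer has lost its type parameter. A minimal sketch of such a stub (assuming the interface stays lambda-friendly, as the test code suggests):

```
// Hypothetical no-op stub, mirroring NoopMappingUpdatePerformer above.
MappingUpdatePerformer noopUpdater = (update, shardId, listener) -> listener.onResponse(null);
```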
diff --git a/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java b/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java index 58c9a8d54159e..b7cdb3301384a 100644 --- a/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java +++ b/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java @@ -188,7 +188,8 @@ public void testRandomPayloadWithDelimitedPayloadTokenFilter() throws IOExceptio .build(); createIndex("test", setting, "type1", mapping); - client().prepareIndex("test", "type1", Integer.toString(1)) + client().prepareIndex("test") + .setId(Integer.toString(1)) .setSource(jsonBuilder().startObject().field("field", queryString).endObject()) .execute() .actionGet(); diff --git a/server/src/test/java/org/opensearch/client/AbstractClientHeadersTestCase.java b/server/src/test/java/org/opensearch/client/AbstractClientHeadersTestCase.java index e4e6594207a5e..92a88aa7940ee 100644 --- a/server/src/test/java/org/opensearch/client/AbstractClientHeadersTestCase.java +++ b/server/src/test/java/org/opensearch/client/AbstractClientHeadersTestCase.java @@ -123,12 +123,13 @@ public void testActions() { // choosing arbitrary top level actions to test client.prepareGet("idx", "id").execute(new AssertingActionListener<>(GetAction.NAME, client.threadPool())); client.prepareSearch().execute(new AssertingActionListener<>(SearchAction.NAME, client.threadPool())); - client.prepareDelete("idx", "type", "id").execute(new AssertingActionListener<>(DeleteAction.NAME, client.threadPool())); + client.prepareDelete("idx", "id").execute(new AssertingActionListener<>(DeleteAction.NAME, client.threadPool())); client.admin() .cluster() .prepareDeleteStoredScript("id") .execute(new AssertingActionListener<>(DeleteStoredScriptAction.NAME, client.threadPool())); - client.prepareIndex("idx", "type", "id") + client.prepareIndex("idx") + .setId("id") .setSource("source", XContentType.JSON) .execute(new AssertingActionListener<>(IndexAction.NAME, client.threadPool())); diff --git a/server/src/test/java/org/opensearch/cluster/ClusterModuleTests.java b/server/src/test/java/org/opensearch/cluster/ClusterModuleTests.java index 8869c2857aa4f..004b784311b54 100644 --- a/server/src/test/java/org/opensearch/cluster/ClusterModuleTests.java +++ b/server/src/test/java/org/opensearch/cluster/ClusterModuleTests.java @@ -254,29 +254,29 @@ public void testAllocationDeciderOrder() { } public void testPre63CustomsFiltering() { - final String whiteListedClusterCustom = randomFrom(ClusterModule.PRE_6_3_CLUSTER_CUSTOMS_WHITE_LIST); - final String whiteListedMetadataCustom = randomFrom(ClusterModule.PRE_6_3_METADATA_CUSTOMS_WHITE_LIST); + final String allowListedClusterCustom = randomFrom(ClusterModule.PRE_6_3_CLUSTER_CUSTOMS_WHITE_LIST); + final String allowListedMetadataCustom = randomFrom(ClusterModule.PRE_6_3_METADATA_CUSTOMS_WHITE_LIST); final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) - .putCustom(whiteListedClusterCustom, new RestoreInProgress.Builder().build()) + .putCustom(allowListedClusterCustom, new RestoreInProgress.Builder().build()) .putCustom("other", new RestoreInProgress.Builder().build()) .metadata( Metadata.builder() - .putCustom(whiteListedMetadataCustom, new RepositoriesMetadata(Collections.emptyList())) + .putCustom(allowListedMetadataCustom, new RepositoriesMetadata(Collections.emptyList())) .putCustom("other", new RepositoriesMetadata(Collections.emptyList())) .build() ) .build(); - 
assertNotNull(clusterState.custom(whiteListedClusterCustom)); + assertNotNull(clusterState.custom(allowListedClusterCustom)); assertNotNull(clusterState.custom("other")); - assertNotNull(clusterState.metadata().custom(whiteListedMetadataCustom)); + assertNotNull(clusterState.metadata().custom(allowListedMetadataCustom)); assertNotNull(clusterState.metadata().custom("other")); final ClusterState fixedClusterState = ClusterModule.filterCustomsForPre63Clients(clusterState); - assertNotNull(fixedClusterState.custom(whiteListedClusterCustom)); + assertNotNull(fixedClusterState.custom(allowListedClusterCustom)); assertNull(fixedClusterState.custom("other")); - assertNotNull(fixedClusterState.metadata().custom(whiteListedMetadataCustom)); + assertNotNull(fixedClusterState.metadata().custom(allowListedMetadataCustom)); assertNull(fixedClusterState.metadata().custom("other")); } diff --git a/server/src/test/java/org/opensearch/cluster/action/index/MappingUpdatedActionTests.java b/server/src/test/java/org/opensearch/cluster/action/index/MappingUpdatedActionTests.java index d6812f7e53131..2278d09722fe2 100644 --- a/server/src/test/java/org/opensearch/cluster/action/index/MappingUpdatedActionTests.java +++ b/server/src/test/java/org/opensearch/cluster/action/index/MappingUpdatedActionTests.java @@ -124,19 +124,19 @@ public void testMappingUpdatedActionBlocks() throws Exception { ) { @Override - protected void sendUpdateMapping(Index index, String type, Mapping mappingUpdate, ActionListener listener) { + protected void sendUpdateMapping(Index index, Mapping mappingUpdate, ActionListener listener) { inFlightListeners.add(listener); } }; PlainActionFuture fut1 = new PlainActionFuture<>(); - mua.updateMappingOnMaster(null, "test", null, fut1); + mua.updateMappingOnMaster(null, null, fut1); assertEquals(1, inFlightListeners.size()); assertEquals(0, mua.blockedThreads()); PlainActionFuture fut2 = new PlainActionFuture<>(); Thread thread = new Thread(() -> { - mua.updateMappingOnMaster(null, "test", null, fut2); // blocked + mua.updateMappingOnMaster(null, null, fut2); // blocked }); thread.start(); assertBusy(() -> assertEquals(1, mua.blockedThreads())); @@ -180,7 +180,7 @@ public void testSendUpdateMappingUsingPutMappingAction() { RootObjectMapper rootObjectMapper = new RootObjectMapper.Builder("name").build(context); Mapping update = new Mapping(LegacyESVersion.V_7_8_0, rootObjectMapper, new MetadataFieldMapper[0], Map.of()); - mua.sendUpdateMapping(new Index("name", "uuid"), "type", update, ActionListener.wrap(() -> {})); + mua.sendUpdateMapping(new Index("name", "uuid"), update, ActionListener.wrap(() -> {})); verify(indicesAdminClient).putMapping(any(), any()); } @@ -210,7 +210,7 @@ public void testSendUpdateMappingUsingAutoPutMappingAction() { RootObjectMapper rootObjectMapper = new RootObjectMapper.Builder("name").build(context); Mapping update = new Mapping(LegacyESVersion.V_7_9_0, rootObjectMapper, new MetadataFieldMapper[0], Map.of()); - mua.sendUpdateMapping(new Index("name", "uuid"), "type", update, ActionListener.wrap(() -> {})); + mua.sendUpdateMapping(new Index("name", "uuid"), update, ActionListener.wrap(() -> {})); verify(indicesAdminClient).execute(eq(AutoPutMappingAction.INSTANCE), any(), any()); } } diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java index f25cf07455be7..b1043dba0a02e 100644 --- 
a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java @@ -39,7 +39,6 @@ import org.opensearch.common.compress.CompressedXContent; import org.opensearch.index.Index; import org.opensearch.index.IndexService; -import org.opensearch.index.mapper.MapperService; import org.opensearch.plugins.Plugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; import org.opensearch.test.InternalSettingsPlugin; @@ -64,9 +63,10 @@ public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Ex final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); // TODO - it will be nice to get a random mapping generator - final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type"); + final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest( + "{ \"properties\": { \"field\": { \"type\": \"text\" }}}" + ); request.indices(new Index[] { indexService.index() }); - request.source("{ \"properties\": { \"field\": { \"type\": \"text\" }}}"); final ClusterStateTaskExecutor.ClusterTasksResult result = mappingService.putMappingExecutor .execute(clusterService.state(), Collections.singletonList(request)); // the task completed successfully @@ -86,8 +86,9 @@ public void testClusterStateIsNotChangedWithIdenticalMappings() throws Exception final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); - final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type"); - request.source("{ \"properties\" { \"field\": { \"type\": \"text\" }}}"); + final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest( + "{ \"properties\" { \"field\": { \"type\": \"text\" }}}" + ); ClusterState result = mappingService.putMappingExecutor.execute( clusterService.state(), Collections.singletonList(request) @@ -105,9 +106,10 @@ public void testMappingVersion() throws Exception { final long previousVersion = indexService.getMetadata().getMappingVersion(); final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); - final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type"); + final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest( + "{ \"properties\": { \"field\": { \"type\": \"text\" }}}" + ); request.indices(new Index[] { indexService.index() }); - request.source("{ \"properties\": { \"field\": { \"type\": \"text\" }}}"); final ClusterStateTaskExecutor.ClusterTasksResult result = mappingService.putMappingExecutor .execute(clusterService.state(), Collections.singletonList(request)); assertThat(result.executionResults.size(), equalTo(1)); @@ -120,34 +122,12 @@ public void testMappingVersionUnchanged() throws Exception { final long previousVersion = indexService.getMetadata().getMappingVersion(); final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); - final PutMappingClusterStateUpdateRequest request = 
new PutMappingClusterStateUpdateRequest().type("type"); + final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest("{ \"properties\": {}}"); request.indices(new Index[] { indexService.index() }); - request.source("{ \"properties\": {}}"); final ClusterStateTaskExecutor.ClusterTasksResult result = mappingService.putMappingExecutor .execute(clusterService.state(), Collections.singletonList(request)); assertThat(result.executionResults.size(), equalTo(1)); assertTrue(result.executionResults.values().iterator().next().isSuccess()); assertThat(result.resultingState.metadata().index("test").getMappingVersion(), equalTo(previousVersion)); } - - public void testMappingUpdateAccepts_docAsType() throws Exception { - final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("my_type")); - final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); - final ClusterService clusterService = getInstanceFromNode(ClusterService.class); - final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type( - MapperService.SINGLE_MAPPING_NAME - ); - request.indices(new Index[] { indexService.index() }); - request.source("{ \"properties\": { \"foo\": { \"type\": \"keyword\" } }}"); - final ClusterStateTaskExecutor.ClusterTasksResult result = mappingService.putMappingExecutor - .execute(clusterService.state(), Collections.singletonList(request)); - assertThat(result.executionResults.size(), equalTo(1)); - assertTrue(result.executionResults.values().iterator().next().isSuccess()); - MappingMetadata mappingMetadata = result.resultingState.metadata().index("test").mapping(); - assertEquals("my_type", mappingMetadata.type()); - assertEquals( - Collections.singletonMap("properties", Collections.singletonMap("foo", Collections.singletonMap("type", "keyword"))), - mappingMetadata.sourceAsMap() - ); - } } diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java index 2ef9cf3bc9b1f..64716794bde2b 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java @@ -659,34 +659,20 @@ public void testFindMappings() throws IOException { .build(); { - ImmutableOpenMap> mappings = metadata.findMappings( - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - MapperPlugin.NOOP_FIELD_FILTER - ); - assertEquals(0, mappings.size()); - } - { - ImmutableOpenMap> mappings = metadata.findMappings( - new String[] { "index1" }, - new String[] { "notfound" }, - MapperPlugin.NOOP_FIELD_FILTER - ); + ImmutableOpenMap mappings = metadata.findMappings(Strings.EMPTY_ARRAY, MapperPlugin.NOOP_FIELD_FILTER); assertEquals(0, mappings.size()); } { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1" }, - Strings.EMPTY_ARRAY, MapperPlugin.NOOP_FIELD_FILTER ); assertEquals(1, mappings.size()); assertIndexMappingsNotFiltered(mappings, "index1"); } { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1", "index2" }, - new String[] { randomBoolean() ? 
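MetadataMappingServiceTests switches to the constructor that carries the mapping source, replacing the old `.type(...)` plus `.source(...)` chain. The shape, shown with an illustrative mapping body:

```
// Sketch of the request construction used in the hunks above; the Index value is assumed
// to come from an existing IndexService, as in the tests.
PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest(
    "{ \"properties\": { \"field\": { \"type\": \"text\" } } }"
);
request.indices(new Index[] { indexService.index() });
```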
"_doc" : "_all" }, MapperPlugin.NOOP_FIELD_FILTER ); assertEquals(2, mappings.size()); @@ -715,43 +701,19 @@ public void testFindMappingsNoOpFilters() throws IOException { .build(); { - ImmutableOpenMap> mappings = metadata.findMappings( - new String[] { "index1" }, - randomBoolean() ? Strings.EMPTY_ARRAY : new String[] { "_all" }, - MapperPlugin.NOOP_FIELD_FILTER - ); - ImmutableOpenMap index1 = mappings.get("index1"); - MappingMetadata mappingMetadata = index1.get("_doc"); - assertSame(originalMappingMetadata, mappingMetadata); - } - { - ImmutableOpenMap> mappings = metadata.findMappings( - new String[] { "index1" }, - randomBoolean() ? Strings.EMPTY_ARRAY : new String[] { "_all" }, - index -> field -> randomBoolean() - ); - ImmutableOpenMap index1 = mappings.get("index1"); - MappingMetadata mappingMetadata = index1.get("_doc"); - assertNotSame(originalMappingMetadata, mappingMetadata); - } - { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1" }, - new String[] { "_doc" }, MapperPlugin.NOOP_FIELD_FILTER ); - ImmutableOpenMap index1 = mappings.get("index1"); - MappingMetadata mappingMetadata = index1.get("_doc"); + MappingMetadata mappingMetadata = mappings.get("index1"); assertSame(originalMappingMetadata, mappingMetadata); } { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1" }, - new String[] { "_doc" }, index -> field -> randomBoolean() ); - ImmutableOpenMap index1 = mappings.get("index1"); - MappingMetadata mappingMetadata = index1.get("_doc"); + MappingMetadata mappingMetadata = mappings.get("index1"); assertNotSame(originalMappingMetadata, mappingMetadata); } } @@ -802,9 +764,8 @@ public void testFindMappingsWithFilters() throws IOException { .build(); { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1", "index2", "index3" }, - new String[] { "_doc" }, index -> { if (index.equals("index1")) { return field -> field.startsWith("name.") == false @@ -822,11 +783,7 @@ public void testFindMappingsWithFilters() throws IOException { assertIndexMappingsNoFields(mappings, "index2"); assertIndexMappingsNotFiltered(mappings, "index3"); - ImmutableOpenMap index1Mappings = mappings.get("index1"); - assertNotNull(index1Mappings); - - assertEquals(1, index1Mappings.size()); - MappingMetadata docMapping = index1Mappings.get("_doc"); + MappingMetadata docMapping = mappings.get("index1"); assertNotNull(docMapping); Map sourceAsMap = docMapping.getSourceAsMap(); @@ -868,17 +825,14 @@ public void testFindMappingsWithFilters() throws IOException { } { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1", "index2", "index3" }, - new String[] { "_doc" }, index -> field -> (index.equals("index3") && field.endsWith("keyword")) ); assertIndexMappingsNoFields(mappings, "index1"); assertIndexMappingsNoFields(mappings, "index2"); - ImmutableOpenMap index3 = mappings.get("index3"); - assertEquals(1, index3.size()); - MappingMetadata mappingMetadata = index3.get("_doc"); + MappingMetadata mappingMetadata = mappings.get("index3"); Map sourceAsMap = mappingMetadata.getSourceAsMap(); assertEquals(3, sourceAsMap.size()); assertTrue(sourceAsMap.containsKey("_routing")); @@ -906,9 +860,8 @@ public void testFindMappingsWithFilters() throws IOException { } { - ImmutableOpenMap> mappings = 
metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1", "index2", "index3" }, - new String[] { "_doc" }, index -> field -> (index.equals("index2")) ); @@ -928,14 +881,8 @@ private static IndexMetadata.Builder buildIndexMetadata(String name, String alia } @SuppressWarnings("unchecked") - private static void assertIndexMappingsNoFields( - ImmutableOpenMap> mappings, - String index - ) { - ImmutableOpenMap indexMappings = mappings.get(index); - assertNotNull(indexMappings); - assertEquals(1, indexMappings.size()); - MappingMetadata docMapping = indexMappings.get("_doc"); + private static void assertIndexMappingsNoFields(ImmutableOpenMap mappings, String index) { + MappingMetadata docMapping = mappings.get(index); assertNotNull(docMapping); Map sourceAsMap = docMapping.getSourceAsMap(); assertEquals(3, sourceAsMap.size()); @@ -946,15 +893,8 @@ private static void assertIndexMappingsNoFields( } @SuppressWarnings("unchecked") - private static void assertIndexMappingsNotFiltered( - ImmutableOpenMap> mappings, - String index - ) { - ImmutableOpenMap indexMappings = mappings.get(index); - assertNotNull(indexMappings); - - assertEquals(1, indexMappings.size()); - MappingMetadata docMapping = indexMappings.get("_doc"); + private static void assertIndexMappingsNotFiltered(ImmutableOpenMap mappings, String index) { + MappingMetadata docMapping = mappings.get(index); assertNotNull(docMapping); Map sourceAsMap = docMapping.getSourceAsMap(); diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java index 2b408097757a5..0b00d26182346 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java @@ -95,7 +95,7 @@ public void testFilterInitialRecovery() { assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); assertNull(routingTable.index("idx").shard(0).shards().get(0).currentNodeId()); - // after failing the shard we are unassigned since the node is blacklisted and we can't initialize on the other node + // after failing the shard we are unassigned since the node is denylisted and we can't initialize on the other node RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, state.getRoutingNodes(), state, null, null, 0); allocation.debugDecision(true); Decision.Single decision = (Decision.Single) filterAllocationDecider.canAllocate( diff --git a/server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java b/server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java new file mode 100644 index 0000000000000..63058da8f163a --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java @@ -0,0 +1,46 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.concurrent.atomic.AtomicInteger; + +public class GatedAutoCloseableTests extends OpenSearchTestCase { + + private AtomicInteger testRef; + private GatedAutoCloseable testObject; + + @Before + public void setup() { + testRef = new AtomicInteger(0); + testObject = new GatedAutoCloseable<>(testRef, testRef::incrementAndGet); + } + + public void testGet() { + assertEquals(0, testObject.get().get()); + } + + public void testClose() { + testObject.close(); + assertEquals(1, testObject.get().get()); + } + + public void testIdempotent() { + testObject.close(); + testObject.close(); + assertEquals(1, testObject.get().get()); + } +} diff --git a/server/src/test/java/org/opensearch/common/concurrent/GatedCloseableTests.java b/server/src/test/java/org/opensearch/common/concurrent/GatedCloseableTests.java new file mode 100644 index 0000000000000..0645f971b8d63 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/GatedCloseableTests.java @@ -0,0 +1,60 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.nio.file.FileSystem; + +import static org.mockito.Mockito.atMostOnce; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; + +public class GatedCloseableTests extends OpenSearchTestCase { + + private FileSystem testRef; + GatedCloseable testObject; + + @Before + public void setup() { + testRef = mock(FileSystem.class); + testObject = new GatedCloseable<>(testRef, testRef::close); + } + + public void testGet() throws Exception { + assertNotNull(testObject.get()); + assertEquals(testRef, testObject.get()); + verify(testRef, never()).close(); + } + + public void testClose() throws IOException { + testObject.close(); + verify(testRef, atMostOnce()).close(); + } + + public void testIdempotent() throws IOException { + testObject.close(); + testObject.close(); + verify(testRef, atMostOnce()).close(); + } + + public void testException() throws IOException { + doThrow(new IOException()).when(testRef).close(); + assertThrows(IOException.class, () -> testObject.close()); + } +} diff --git a/server/src/test/java/org/opensearch/common/concurrent/OneWayGateTests.java b/server/src/test/java/org/opensearch/common/concurrent/OneWayGateTests.java new file mode 100644 index 0000000000000..357bf3ae321f8 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/OneWayGateTests.java @@ -0,0 +1,41 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +public class OneWayGateTests extends OpenSearchTestCase { + + private OneWayGate testGate; + + @Before + public void setup() { + testGate = new OneWayGate(); + } + + public void testGateOpen() { + assertFalse(testGate.isClosed()); + } + + public void testGateClosed() { + testGate.close(); + assertTrue(testGate.isClosed()); + } + + public void testGateIdempotent() { + assertTrue(testGate.close()); + assertFalse(testGate.close()); + } +} diff --git a/server/src/test/java/org/opensearch/common/concurrent/RefCountedReleasableTests.java b/server/src/test/java/org/opensearch/common/concurrent/RefCountedReleasableTests.java new file mode 100644 index 0000000000000..63c0873f1593d --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/RefCountedReleasableTests.java @@ -0,0 +1,68 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.concurrent.atomic.AtomicInteger; + +public class RefCountedReleasableTests extends OpenSearchTestCase { + + private AtomicInteger testRef; + private RefCountedReleasable testObject; + + @Before + public void setup() { + testRef = new AtomicInteger(0); + testObject = new RefCountedReleasable<>("test", testRef, testRef::incrementAndGet); + } + + public void testInitialState() { + assertEquals("test", testObject.getName()); + assertEquals(testRef, testObject.get()); + assertEquals(testRef, testObject.get()); + assertEquals(0, testObject.get().get()); + assertEquals(1, testObject.refCount()); + } + + public void testIncRef() { + testObject.incRef(); + assertEquals(2, testObject.refCount()); + assertEquals(0, testObject.get().get()); + } + + public void testCloseWithoutInternal() { + testObject.incRef(); + assertEquals(2, testObject.refCount()); + testObject.close(); + assertEquals(1, testObject.refCount()); + assertEquals(0, testObject.get().get()); + } + + public void testCloseWithInternal() { + assertEquals(1, testObject.refCount()); + testObject.close(); + assertEquals(0, testObject.refCount()); + assertEquals(1, testObject.get().get()); + } + + public void testIncRefAfterClose() { + assertEquals(1, testObject.refCount()); + testObject.close(); + assertEquals(0, testObject.refCount()); + assertEquals(1, testObject.get().get()); + assertThrows(IllegalStateException.class, () -> testObject.incRef()); + } +} diff --git a/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java b/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java index b67372ea9e838..1e57f9fe88d9c 100644 --- a/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java @@ -95,16 +95,127 @@ public void testEpochMillisParser() { Instant instant = Instant.from(formatter.parse("12345")); assertThat(instant.getEpochSecond(), is(12L)); assertThat(instant.getNano(), is(345_000_000)); + assertThat(formatter.format(instant), is("12345")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); } { Instant instant = 
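The new test files above introduce small concurrency helpers (GatedAutoCloseable, GatedCloseable, OneWayGate, RefCountedReleasable). A hedged sketch of how GatedCloseable and OneWayGate appear to be used, inferred only from the assertions in these tests; the wrapped FileChannel resource is hypothetical:

```
// Inferred usage only; signatures are taken from the tests, not from the production classes.
void example(FileChannel channel) throws IOException {
    GatedCloseable<FileChannel> guarded = new GatedCloseable<>(channel, channel::close);
    guarded.get().size();    // work with the wrapped resource
    guarded.close();         // runs the close action; the tests show repeat calls are no-ops

    OneWayGate gate = new OneWayGate();
    if (gate.close()) {      // true only for the first close
        // one-time shutdown work
    }
    assert gate.isClosed();
}
```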
Instant.from(formatter.parse("0")); assertThat(instant.getEpochSecond(), is(0L)); assertThat(instant.getNano(), is(0)); + assertThat(formatter.format(instant), is("0")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-123000.123456")); + assertThat(instant.getEpochSecond(), is(-124L)); + assertThat(instant.getNano(), is(999876544)); + assertThat(formatter.format(instant), is("-123000.123456")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); } { Instant instant = Instant.from(formatter.parse("123.123456")); assertThat(instant.getEpochSecond(), is(0L)); assertThat(instant.getNano(), is(123123456)); + assertThat(formatter.format(instant), is("123.123456")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-123.123456")); + assertThat(instant.getEpochSecond(), is(-1L)); + assertThat(instant.getNano(), is(876876544)); + assertThat(formatter.format(instant), is("-123.123456")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6789123.123456")); + assertThat(instant.getEpochSecond(), is(-6790L)); + assertThat(instant.getNano(), is(876876544)); + assertThat(formatter.format(instant), is("-6789123.123456")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("6789123.123456")); + assertThat(instant.getEpochSecond(), is(6789L)); + assertThat(instant.getNano(), is(123123456)); + assertThat(formatter.format(instant), is("6789123.123456")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000430768.25")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(231750000)); + assertThat(formatter.format(instant), is("-6250000430768.25")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000430768.75")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(231250000)); + assertThat(formatter.format(instant), is("-6250000430768.75")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000430768.00")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(232000000)); + assertThat(formatter.format(instant), is("-6250000430768")); // remove .00 precision + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000431000.250000")); + assertThat(instant.getEpochSecond(), is(-6250000432L)); + assertThat(instant.getNano(), is(999750000)); + assertThat(formatter.format(instant), is("-6250000431000.25")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000431000.000001")); + assertThat(instant.getEpochSecond(), is(-6250000432L)); + assertThat(instant.getNano(), is(999999999)); + assertThat(formatter.format(instant), is("-6250000431000.000001")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), 
is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000431000.75")); + assertThat(instant.getEpochSecond(), is(-6250000432L)); + assertThat(instant.getNano(), is(999250000)); + assertThat(formatter.format(instant), is("-6250000431000.75")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000431000.00")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(0)); + assertThat(formatter.format(instant), is("-6250000431000")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000431000")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(0)); + assertThat(formatter.format(instant), is("-6250000431000")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000430768")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(232000000)); + assertThat(formatter.format(instant), is("-6250000430768")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("1680000430768")); + assertThat(instant.getEpochSecond(), is(1680000430L)); + assertThat(instant.getNano(), is(768000000)); + assertThat(formatter.format(instant), is("1680000430768")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-0.12345")); + assertThat(instant.getEpochSecond(), is(-1L)); + assertThat(instant.getNano(), is(999876550)); + assertThat(formatter.format(instant), is("-0.12345")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); } } @@ -227,20 +338,69 @@ public void testEpochFormatting() { long seconds = randomLongBetween(0, 130L * 365 * 86400); // from 1970 epoch till around 2100 long nanos = randomLongBetween(0, 999_999_999L); Instant instant = Instant.ofEpochSecond(seconds, nanos); + { + DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis"); + String millis = millisFormatter.format(instant); + Instant millisInstant = Instant.from(millisFormatter.parse(millis)); + assertThat(millisInstant.toEpochMilli(), is(instant.toEpochMilli())); + assertThat(millisFormatter.format(Instant.ofEpochSecond(42, 0)), is("42000")); + assertThat(millisFormatter.format(Instant.ofEpochSecond(42, 123456789L)), is("42123.456789")); + + DateFormatter secondsFormatter = DateFormatter.forPattern("epoch_second"); + String formattedSeconds = secondsFormatter.format(instant); + Instant secondsInstant = Instant.from(secondsFormatter.parse(formattedSeconds)); + assertThat(secondsInstant.getEpochSecond(), is(instant.getEpochSecond())); + + assertThat(secondsFormatter.format(Instant.ofEpochSecond(42, 0)), is("42")); + } + { + DateFormatter isoFormatter = DateFormatters.forPattern("strict_date_optional_time_nanos"); + DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis"); + String millis = millisFormatter.format(instant); + String iso8601 = isoFormatter.format(instant); - DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis"); - String millis = millisFormatter.format(instant); - Instant millisInstant = Instant.from(millisFormatter.parse(millis)); - 
assertThat(millisInstant.toEpochMilli(), is(instant.toEpochMilli())); - assertThat(millisFormatter.format(Instant.ofEpochSecond(42, 0)), is("42000")); - assertThat(millisFormatter.format(Instant.ofEpochSecond(42, 123456789L)), is("42123.456789")); + Instant millisInstant = Instant.from(millisFormatter.parse(millis)); + Instant isoInstant = Instant.from(isoFormatter.parse(iso8601)); - DateFormatter secondsFormatter = DateFormatter.forPattern("epoch_second"); - String formattedSeconds = secondsFormatter.format(instant); - Instant secondsInstant = Instant.from(secondsFormatter.parse(formattedSeconds)); - assertThat(secondsInstant.getEpochSecond(), is(instant.getEpochSecond())); + assertThat(millisInstant.toEpochMilli(), is(isoInstant.toEpochMilli())); + assertThat(millisInstant.getEpochSecond(), is(isoInstant.getEpochSecond())); + assertThat(millisInstant.getNano(), is(isoInstant.getNano())); + } + } + + public void testEpochFormattingNegativeEpoch() { + long seconds = randomLongBetween(-130L * 365 * 86400, 0); // around 1840 till 1970 epoch + long nanos = randomLongBetween(0, 999_999_999L); + Instant instant = Instant.ofEpochSecond(seconds, nanos); - assertThat(secondsFormatter.format(Instant.ofEpochSecond(42, 0)), is("42")); + { + DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis"); + String millis = millisFormatter.format(instant); + Instant millisInstant = Instant.from(millisFormatter.parse(millis)); + assertThat(millisInstant.toEpochMilli(), is(instant.toEpochMilli())); + assertThat(millisFormatter.format(Instant.ofEpochSecond(-42, 0)), is("-42000")); + assertThat(millisFormatter.format(Instant.ofEpochSecond(-42, 123456789L)), is("-41876.543211")); + + DateFormatter secondsFormatter = DateFormatter.forPattern("epoch_second"); + String formattedSeconds = secondsFormatter.format(instant); + Instant secondsInstant = Instant.from(secondsFormatter.parse(formattedSeconds)); + assertThat(secondsInstant.getEpochSecond(), is(instant.getEpochSecond())); + + assertThat(secondsFormatter.format(Instant.ofEpochSecond(42, 0)), is("42")); + } + { + DateFormatter isoFormatter = DateFormatters.forPattern("strict_date_optional_time_nanos"); + DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis"); + String millis = millisFormatter.format(instant); + String iso8601 = isoFormatter.format(instant); + + Instant millisInstant = Instant.from(millisFormatter.parse(millis)); + Instant isoInstant = Instant.from(isoFormatter.parse(iso8601)); + + assertThat(millisInstant.toEpochMilli(), is(isoInstant.toEpochMilli())); + assertThat(millisInstant.getEpochSecond(), is(isoInstant.getEpochSecond())); + assertThat(millisInstant.getNano(), is(isoInstant.getNano())); + } } public void testParsingStrictNanoDates() { diff --git a/server/src/test/java/org/opensearch/index/IndexServiceTests.java b/server/src/test/java/org/opensearch/index/IndexServiceTests.java index 47feced5302f4..be38b707b77b4 100644 --- a/server/src/test/java/org/opensearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/opensearch/index/IndexServiceTests.java @@ -300,7 +300,7 @@ public void testRefreshActuallyWorks() throws Exception { assertEquals(1000, refreshTask.getInterval().millis()); assertTrue(indexService.getRefreshTask().mustReschedule()); IndexShard shard = indexService.getShard(0); - client().prepareIndex("test", "test", "0").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("0").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); // now disable 
the refresh client().admin() .indices() @@ -321,7 +321,7 @@ public void testRefreshActuallyWorks() throws Exception { }); assertFalse(refreshTask.isClosed()); // refresh every millisecond - client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); client().admin() .indices() .prepareUpdateSettings("test") @@ -335,7 +335,7 @@ public void testRefreshActuallyWorks() throws Exception { assertEquals(2, search.totalHits.value); } }); - client().prepareIndex("test", "test", "2").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("2").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); assertBusy(() -> { // this one becomes visible due to the scheduled refresh try (Engine.Searcher searcher = shard.acquireSearcher("test")) { @@ -353,7 +353,7 @@ public void testAsyncFsyncActuallyWorks() throws Exception { IndexService indexService = createIndex("test", settings); ensureGreen("test"); assertTrue(indexService.getRefreshTask().mustReschedule()); - client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); IndexShard shard = indexService.getShard(0); assertBusy(() -> assertFalse(shard.isSyncNeeded())); } @@ -375,7 +375,7 @@ public void testRescheduleAsyncFsync() throws Exception { assertNotNull(indexService.getFsyncTask()); assertTrue(indexService.getFsyncTask().mustReschedule()); - client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); assertNotNull(indexService.getFsyncTask()); final IndexShard shard = indexService.getShard(0); assertBusy(() -> assertFalse(shard.isSyncNeeded())); @@ -402,7 +402,7 @@ public void testAsyncTranslogTrimActuallyWorks() throws Exception { IndexService indexService = createIndex("test", settings); ensureGreen("test"); assertTrue(indexService.getTrimTranslogTask().mustReschedule()); - client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); client().admin().indices().prepareFlush("test").get(); client().admin() .indices() diff --git a/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java b/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java index 40affca7b2773..ed6ae6f15dfd2 100644 --- a/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java +++ b/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java @@ -14,6 +14,7 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; +import org.opensearch.index.codec.CodecServiceFactory; import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.index.translog.TranslogDeletionPolicy; import org.opensearch.index.translog.TranslogDeletionPolicyFactory; @@ -85,6 +86,18 @@ public void testCreateEngineConfigFromFactoryMultipleCodecServiceIllegalStateExc expectThrows(IllegalStateException.class, () -> new EngineConfigFactory(plugins, indexSettings)); } + public void 
testCreateEngineConfigFromFactoryMultipleCodecServiceAndFactoryIllegalStateException() { + IndexMetadata meta = IndexMetadata.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + List plugins = Arrays.asList(new FooEnginePlugin(), new BakEnginePlugin()); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings()); + + expectThrows(IllegalStateException.class, () -> new EngineConfigFactory(plugins, indexSettings)); + } + public void testCreateEngineConfigFromFactoryMultipleCustomTranslogDeletionPolicyFactoryIllegalStateException() { IndexMetadata meta = IndexMetadata.builder("test") .settings(settings(Version.CURRENT)) @@ -97,6 +110,44 @@ public void testCreateEngineConfigFromFactoryMultipleCustomTranslogDeletionPolic expectThrows(IllegalStateException.class, () -> new EngineConfigFactory(plugins, indexSettings)); } + public void testCreateCodecServiceFromFactory() { + IndexMetadata meta = IndexMetadata.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + List plugins = Arrays.asList(new BakEnginePlugin()); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings()); + + EngineConfigFactory factory = new EngineConfigFactory(plugins, indexSettings); + EngineConfig config = factory.newEngineConfig( + null, + null, + indexSettings, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + TimeValue.timeValueMinutes(5), + null, + null, + null, + null, + null, + () -> new RetentionLeases(0, 0, Collections.emptyList()), + null, + null, + false + ); + assertNotNull(config.getCodec()); + } + private static class FooEnginePlugin extends Plugin implements EnginePlugin { @Override public Optional getEngineFactory(final IndexSettings indexSettings) { @@ -126,6 +177,18 @@ public Optional getCustomCodecService(IndexSettings indexSettings) } } + private static class BakEnginePlugin extends Plugin implements EnginePlugin { + @Override + public Optional getEngineFactory(final IndexSettings indexSettings) { + return Optional.empty(); + } + + @Override + public Optional getCustomCodecServiceFactory(IndexSettings indexSettings) { + return Optional.of(config -> new CodecService(config.getMapperService(), LogManager.getLogger(getClass()))); + } + } + private static class BazEnginePlugin extends Plugin implements EnginePlugin { @Override public Optional getEngineFactory(final IndexSettings indexSettings) { diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index 0725445a39865..bb3ce773a13af 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -82,6 +82,8 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.SetOnce; +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.action.ActionListener; @@ -101,6 +103,7 @@ import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.logging.Loggers; import org.opensearch.common.lucene.Lucene; import 
org.opensearch.common.lucene.index.OpenSearchDirectoryReader; @@ -154,8 +157,6 @@ import org.opensearch.test.IndexSettingsModule; import org.opensearch.test.VersionUtils; import org.opensearch.threadpool.ThreadPool; -import org.hamcrest.MatcherAssert; -import org.hamcrest.Matchers; import java.io.Closeable; import java.io.IOException; @@ -196,15 +197,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.shuffle; -import static org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_RESET; -import static org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY; -import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; -import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; -import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; -import static org.opensearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; -import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; -import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; -import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.sameInstance; import static org.hamcrest.Matchers.contains; @@ -230,6 +222,15 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import static org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_RESET; +import static org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY; +import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; +import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; +import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; +import static org.opensearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; +import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; +import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; public class InternalEngineTests extends EngineTestCase { @@ -1086,9 +1087,9 @@ public void testSyncTranslogConcurrently() throws Exception { final CheckedRunnable checker = () -> { assertThat(engine.getTranslogStats().getUncommittedOperations(), equalTo(0)); assertThat(engine.getLastSyncedGlobalCheckpoint(), equalTo(globalCheckpoint.get())); - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { SequenceNumbers.CommitInfo commitInfo = SequenceNumbers.loadSeqNoInfoFromLuceneCommit( - safeCommit.getIndexCommit().getUserData().entrySet() + wrappedSafeCommit.get().getUserData().entrySet() ); assertThat(commitInfo.localCheckpoint, equalTo(engine.getProcessedLocalCheckpoint())); } @@ -1504,8 +1505,8 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { globalCheckpoint.set(randomLongBetween(0, localCheckpoint)); engine.syncTranslog(); final long safeCommitCheckpoint; - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { - safeCommitCheckpoint = Long.parseLong(safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { + safeCommitCheckpoint = 
Long.parseLong(wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); } engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); @@ -1594,9 +1595,9 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc globalCheckpoint.set(randomLongBetween(0, engine.getPersistedLocalCheckpoint())); engine.syncTranslog(); final long minSeqNoToRetain; - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { long safeCommitLocalCheckpoint = Long.parseLong( - safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY) + wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY) ); minSeqNoToRetain = Math.min(globalCheckpoint.get() + 1 - retainedExtraOps, safeCommitLocalCheckpoint + 1); } @@ -2615,7 +2616,7 @@ public void testSeqNoAndCheckpoints() throws IOException, InterruptedException { // this test writes documents to the engine while concurrently flushing/commit // and ensuring that the commit points contain the correct sequence number data public void testConcurrentWritesAndCommits() throws Exception { - List commits = new ArrayList<>(); + List> commits = new ArrayList<>(); try ( Store store = createStore(); InternalEngine engine = createEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null)) @@ -2670,8 +2671,8 @@ public void testConcurrentWritesAndCommits() throws Exception { // now, verify all the commits have the correct docs according to the user commit data long prevLocalCheckpoint = SequenceNumbers.NO_OPS_PERFORMED; long prevMaxSeqNo = SequenceNumbers.NO_OPS_PERFORMED; - for (Engine.IndexCommitRef commitRef : commits) { - final IndexCommit commit = commitRef.getIndexCommit(); + for (GatedCloseable wrappedCommit : commits) { + final IndexCommit commit = wrappedCommit.get(); Map userData = commit.getUserData(); long localCheckpoint = userData.containsKey(SequenceNumbers.LOCAL_CHECKPOINT_KEY) ? 
Long.parseLong(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) @@ -5621,7 +5622,7 @@ public void testAcquireIndexCommit() throws Exception { IOUtils.close(engine, store); store = createStore(); final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); - final Engine.IndexCommitRef snapshot; + final GatedCloseable wrappedSnapshot; final boolean closeSnapshotBeforeEngine = randomBoolean(); try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) { int numDocs = between(1, 20); @@ -5634,9 +5635,9 @@ public void testAcquireIndexCommit() throws Exception { final boolean flushFirst = randomBoolean(); final boolean safeCommit = randomBoolean(); if (safeCommit) { - snapshot = engine.acquireSafeIndexCommit(); + wrappedSnapshot = engine.acquireSafeIndexCommit(); } else { - snapshot = engine.acquireLastIndexCommit(flushFirst); + wrappedSnapshot = engine.acquireLastIndexCommit(flushFirst); } int moreDocs = between(1, 20); for (int i = 0; i < moreDocs; i++) { @@ -5645,13 +5646,13 @@ public void testAcquireIndexCommit() throws Exception { globalCheckpoint.set(numDocs + moreDocs - 1); engine.flush(); // check that we can still read the commit that we captured - try (IndexReader reader = DirectoryReader.open(snapshot.getIndexCommit())) { + try (IndexReader reader = DirectoryReader.open(wrappedSnapshot.get())) { assertThat(reader.numDocs(), equalTo(flushFirst && safeCommit == false ? numDocs : 0)); } assertThat(DirectoryReader.listCommits(engine.store.directory()), hasSize(2)); if (closeSnapshotBeforeEngine) { - snapshot.close(); + wrappedSnapshot.close(); // check it's clean up engine.flush(true, true); assertThat(DirectoryReader.listCommits(engine.store.directory()), hasSize(1)); @@ -5659,7 +5660,7 @@ public void testAcquireIndexCommit() throws Exception { } if (closeSnapshotBeforeEngine == false) { - snapshot.close(); // shouldn't throw AlreadyClosedException + wrappedSnapshot.close(); // shouldn't throw AlreadyClosedException } } @@ -5723,7 +5724,7 @@ public void testCleanupCommitsWhenReleaseSnapshot() throws Exception { } engine.flush(false, randomBoolean()); int numSnapshots = between(1, 10); - final List snapshots = new ArrayList<>(); + final List> snapshots = new ArrayList<>(); for (int i = 0; i < numSnapshots; i++) { snapshots.add(engine.acquireSafeIndexCommit()); // taking snapshots from the safe commit. } @@ -6096,7 +6097,7 @@ public void testTrimUnsafeCommits() throws Exception { minTranslogGen = engine.getTranslog().getMinFileGeneration(); } - store.trimUnsafeCommits(globalCheckpoint.get(), minTranslogGen, config.getIndexSettings().getIndexVersionCreated()); + store.trimUnsafeCommits(config.getTranslogConfig().getTranslogPath()); long safeMaxSeqNo = commitMaxSeqNo.stream() .filter(s -> s <= globalCheckpoint.get()) .reduce((s1, s2) -> s2) // get the last one. 
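The InternalEngineTests hunks above track an engine API change: `Engine.IndexCommitRef` is replaced by the new `org.opensearch.common.concurrent.GatedCloseable` wrapper, so an acquired commit is now read via `get()` instead of `getIndexCommit()`. The sketch below is illustrative only and is not the actual OpenSearch class; it shows the wrapper shape these tests rely on (a resource paired with the action that releases it), and the real implementation may differ, for example by using `OneWayGate` to make `close()` idempotent.

```java
import java.io.Closeable;
import java.io.IOException;

// Minimal, illustrative sketch of a GatedCloseable-style wrapper (assumed shape, not the
// actual OpenSearch implementation): it pairs a resource with the action that releases it.
final class ExampleGatedCloseable<T> implements Closeable {

    /** Release action that may throw, mirroring how engine index commits are released. */
    @FunctionalInterface
    interface CheckedRunnable {
        void run() throws IOException;
    }

    private final T ref;
    private final CheckedRunnable onClose;

    ExampleGatedCloseable(T ref, CheckedRunnable onClose) {
        this.ref = ref;
        this.onClose = onClose;
    }

    /** The wrapped resource, e.g. the IndexCommit obtained from acquireSafeIndexCommit(). */
    T get() {
        return ref;
    }

    /** Releases the wrapped resource by running the supplied close action. */
    @Override
    public void close() throws IOException {
        onClose.run();
    }
}
```

In the tests this surfaces as `try (GatedCloseable<IndexCommit> wrappedSafeCommit = engine.acquireSafeIndexCommit()) { ... wrappedSafeCommit.get().getUserData() ... }` in place of the old `safeCommit.getIndexCommit()` accessor.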
@@ -6326,8 +6327,8 @@ public void testKeepMinRetainedSeqNoByMergePolicy() throws IOException { .collect(Collectors.toSet()); assertThat(actualOps, containsInAnyOrder(expectedOps)); } - try (Engine.IndexCommitRef commitRef = engine.acquireSafeIndexCommit()) { - IndexCommit safeCommit = commitRef.getIndexCommit(); + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { + IndexCommit safeCommit = wrappedSafeCommit.get(); if (safeCommit.getUserData().containsKey(Engine.MIN_RETAINED_SEQNO)) { lastMinRetainedSeqNo = Long.parseLong(safeCommit.getUserData().get(Engine.MIN_RETAINED_SEQNO)); } diff --git a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java index 65b8a81b029c0..e04bf1a4f20f2 100644 --- a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java @@ -33,6 +33,7 @@ package org.opensearch.index.engine; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.store.LockObtainFailedException; @@ -41,6 +42,7 @@ import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.ShardRoutingState; import org.opensearch.cluster.routing.TestShardRouting; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; @@ -114,8 +116,8 @@ public void testNoopAfterRegularEngine() throws IOException { final NoOpEngine noOpEngine = new NoOpEngine(noOpConfig(INDEX_SETTINGS, store, primaryTranslogDir, tracker)); assertThat(noOpEngine.getPersistedLocalCheckpoint(), equalTo(localCheckpoint)); assertThat(noOpEngine.getSeqNoStats(100L).getMaxSeqNo(), equalTo(maxSeqNo)); - try (Engine.IndexCommitRef ref = noOpEngine.acquireLastIndexCommit(false)) { - try (IndexReader reader = DirectoryReader.open(ref.getIndexCommit())) { + try (GatedCloseable wrappedCommit = noOpEngine.acquireLastIndexCommit(false)) { + try (IndexReader reader = DirectoryReader.open(wrappedCommit.get())) { assertThat(reader.numDocs(), equalTo(docs)); } } diff --git a/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java index e1a3cfb91af9c..d7cf873e133df 100644 --- a/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -128,7 +128,7 @@ private void refreshIndex() { } private void indexDocument(String id, String sValue) { - IndexResponse response = client().prepareIndex("index", "type", id).setSource("s", sValue).get(); + IndexResponse response = client().prepareIndex("index").setId(id).setSource("s", sValue).get(); assertThat(response.status(), anyOf(equalTo(RestStatus.OK), equalTo(RestStatus.CREATED))); } } diff --git a/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java b/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java index 87e3ba253bfe3..27e895ee64f90 100644 --- a/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java +++ 
b/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java @@ -74,9 +74,7 @@ protected Collection> getPlugins() { public void putMappings() { assertAcked(client().admin().indices().prepareCreate("index1")); assertAcked(client().admin().indices().prepareCreate("filtered")); - assertAcked( - client().admin().indices().preparePutMapping("index1", "filtered").setType("_doc").setSource(TEST_ITEM, XContentType.JSON) - ); + assertAcked(client().admin().indices().preparePutMapping("index1", "filtered").setSource(TEST_ITEM, XContentType.JSON)); } public void testGetMappings() { @@ -102,8 +100,8 @@ public void testGetFieldMappings() { // double check that submitting the filtered mappings to an unfiltered index leads to the same get field mappings output // as the one coming from a filtered index with same mappings GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("filtered").get(); - ImmutableOpenMap filtered = getMappingsResponse.getMappings().get("filtered"); - assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filtered.get("_doc").getSourceAsMap())); + MappingMetadata filtered = getMappingsResponse.getMappings().get("filtered"); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filtered.getSourceAsMap())); GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("test").setFields("*").get(); assertEquals(1, response.mappings().size()); assertFieldMappings(response.mappings().get("test"), FILTERED_FLAT_FIELDS); @@ -121,8 +119,8 @@ public void testFieldCapabilities() { // double check that submitting the filtered mappings to an unfiltered index leads to the same field_caps output // as the one coming from a filtered index with same mappings GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("filtered").get(); - ImmutableOpenMap filteredMapping = getMappingsResponse.getMappings().get("filtered"); - assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filteredMapping.get("_doc").getSourceAsMap())); + MappingMetadata filteredMapping = getMappingsResponse.getMappings().get("filtered"); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filteredMapping.getSourceAsMap())); FieldCapabilitiesResponse test = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("test")).actionGet(); // properties.value is an object field in the new mapping filteredFields.add("properties.value"); @@ -161,12 +159,12 @@ private static void assertFieldMappings( assertEquals("Some unexpected fields were returned: " + fields.keySet(), 0, fields.size()); } - private void assertExpectedMappings(ImmutableOpenMap> mappings) { + private void assertExpectedMappings(ImmutableOpenMap mappings) { assertEquals(2, mappings.size()); assertNotFiltered(mappings.get("index1")); - ImmutableOpenMap filtered = mappings.get("filtered"); + MappingMetadata filtered = mappings.get("filtered"); assertFiltered(filtered); - assertMappingsAreValid(filtered.get("_doc").getSourceAsMap()); + assertMappingsAreValid(filtered.getSourceAsMap()); } private void assertMappingsAreValid(Map sourceAsMap) { @@ -179,9 +177,7 @@ private void assertMappingsAreValid(Map sourceAsMap) { } @SuppressWarnings("unchecked") - private static void assertFiltered(ImmutableOpenMap mappings) { - assertEquals(1, mappings.size()); - MappingMetadata mappingMetadata = mappings.get("_doc"); + private static void 
assertFiltered(MappingMetadata mappingMetadata) { assertNotNull(mappingMetadata); Map sourceAsMap = mappingMetadata.getSourceAsMap(); assertEquals(4, sourceAsMap.size()); @@ -226,9 +222,7 @@ private static void assertFiltered(ImmutableOpenMap map } @SuppressWarnings("unchecked") - private static void assertNotFiltered(ImmutableOpenMap mappings) { - assertEquals(1, mappings.size()); - MappingMetadata mappingMetadata = mappings.get("_doc"); + private static void assertNotFiltered(MappingMetadata mappingMetadata) { assertNotNull(mappingMetadata); Map sourceAsMap = mappingMetadata.getSourceAsMap(); assertEquals(4, sourceAsMap.size()); diff --git a/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java b/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java index 856b3b2cd2099..cc4626bc89641 100644 --- a/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java @@ -47,17 +47,16 @@ public class GenericStoreDynamicTemplateTests extends OpenSearchSingleNodeTestCa public void testSimple() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/dynamictemplate/genericstore/test-mapping.json"); IndexService index = createIndex("test"); - client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping("test").setSource(mapping, XContentType.JSON).get(); MapperService mapperService = index.mapperService(); byte[] json = copyToBytesFromClasspath("/org/opensearch/index/mapper/dynamictemplate/genericstore/test-data.json"); ParsedDocument parsedDoc = mapperService.documentMapper() - .parse(new SourceToParse("test", "person", "1", new BytesArray(json), XContentType.JSON)); + .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray(json), XContentType.JSON)); client().admin() .indices() .preparePutMapping("test") - .setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON) .get(); Document doc = parsedDoc.rootDoc(); diff --git a/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java b/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java index 3905ac0969850..b5989d93b520d 100644 --- a/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java @@ -50,20 +50,22 @@ public void testMergeMultiField() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); assertThat(mapperService.fieldType("name.indexed"), nullValue()); BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); - Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper() + .parse(new 
SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) + .rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, nullValue()); mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/test-mapping2.json"); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); @@ -72,14 +74,16 @@ public void testMergeMultiField() throws Exception { assertThat(mapperService.fieldType("name.not_indexed2"), nullValue()); assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); - doc = mapperService.documentMapper().parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + doc = mapperService.documentMapper() + .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) + .rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/test-mapping3.json"); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); @@ -89,7 +93,7 @@ public void testMergeMultiField() throws Exception { assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/test-mapping4.json"); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); @@ -103,20 +107,22 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); assertThat(mapperService.fieldType("name.indexed"), nullValue()); BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); - Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper() + .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) + .rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, nullValue()); mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/upgrade1.json"); - mapperService.merge("person", new 
CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); @@ -125,14 +131,16 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { assertThat(mapperService.fieldType("name.not_indexed2"), nullValue()); assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); - doc = mapperService.documentMapper().parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + doc = mapperService.documentMapper() + .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) + .rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/upgrade2.json"); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); @@ -143,7 +151,11 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/upgrade3.json"); try { - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(mapping), + MapperService.MergeReason.MAPPING_UPDATE + ); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("Cannot update parameter [index] from [true] to [false]")); diff --git a/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java b/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java index c2c96737506d9..918f5b325d81a 100644 --- a/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java @@ -72,10 +72,13 @@ private void testMultiField(String mapping) throws Exception { IndexService indexService = createIndex("test"); MapperService mapperService = indexService.mapperService(); - indexService.mapperService().merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + indexService.mapperService() + .merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/multifield/test-data.json")); - Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper() + .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) + .rootDoc(); IndexableField f = doc.getField("name"); assertThat(f.name(), equalTo("name")); @@ -139,7 +142,7 @@ private void testMultiField(String mapping) throws Exception { public void testBuildThenParse() throws Exception { IndexService indexService = createIndex("test"); DocumentMapper builderDocMapper = new DocumentMapper.Builder( - new RootObjectMapper.Builder("person").add( + new 
RootObjectMapper.Builder(MapperService.SINGLE_MAPPING_NAME).add( new TextFieldMapper.Builder("name", createDefaultIndexAnalyzers()).store(true) .addMultiField(new TextFieldMapper.Builder("indexed", createDefaultIndexAnalyzers()).index(true)) .addMultiField(new TextFieldMapper.Builder("not_indexed", createDefaultIndexAnalyzers()).index(false).store(true)) @@ -151,10 +154,11 @@ public void testBuildThenParse() throws Exception { // reparse it DocumentMapper docMapper = indexService.mapperService() .documentMapperParser() - .parse("person", new CompressedXContent(builtMapping)); + .parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(builtMapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/multifield/test-data.json")); - Document doc = docMapper.parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) + .rootDoc(); IndexableField f = doc.getField("name"); assertThat(f.name(), equalTo("name")); diff --git a/server/src/test/java/org/opensearch/index/mapper/PathMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/PathMapperTests.java index 9fbe349c609a2..ed5470b861811 100644 --- a/server/src/test/java/org/opensearch/index/mapper/PathMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/PathMapperTests.java @@ -46,7 +46,7 @@ public void testPathMapping() throws IOException { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/path/test-mapping.json"); DocumentMapper docMapper = createIndex("test").mapperService() .documentMapperParser() - .parse("person", new CompressedXContent(mapping)); + .parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping)); // test full name assertThat(docMapper.mappers().getMapper("first1"), nullValue()); diff --git a/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java b/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java index a2fa7c68f67f9..4976372ceaf23 100644 --- a/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java @@ -47,17 +47,16 @@ public class PathMatchDynamicTemplateTests extends OpenSearchSingleNodeTestCase public void testSimple() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json"); IndexService index = createIndex("test"); - client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping("test").setSource(mapping, XContentType.JSON).get(); MapperService mapperService = index.mapperService(); byte[] json = copyToBytesFromClasspath("/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-data.json"); ParsedDocument parsedDoc = mapperService.documentMapper() - .parse(new SourceToParse("test", "person", "1", new BytesArray(json), XContentType.JSON)); + .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray(json), XContentType.JSON)); client().admin() .indices() .preparePutMapping("test") - .setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON) .get(); Document doc = parsedDoc.rootDoc(); diff --git 
a/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java index aca124fcb8a93..0a01d86e76dea 100644 --- a/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java @@ -74,7 +74,6 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws new CompressedXContent( Strings.toString( PutMappingRequest.buildFromSimplifiedDef( - "_doc", INTEGER_RANGE_FIELD_NAME, "type=integer_range", LONG_RANGE_FIELD_NAME, diff --git a/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java b/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java index 42a37c0b2ec1a..d54283f03759f 100644 --- a/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java @@ -257,7 +257,6 @@ public void testMappingVersion() { final long previousVersion = clusterService.state().metadata().index("test").getMappingVersion(); final PutMappingRequest request = new PutMappingRequest(); request.indices("test"); - request.type("type"); request.source("field", "type=text"); client().admin().indices().putMapping(request).actionGet(); assertThat(clusterService.state().metadata().index("test").getMappingVersion(), Matchers.equalTo(1 + previousVersion)); @@ -267,7 +266,6 @@ public void testMappingVersion() { final long previousVersion = clusterService.state().metadata().index("test").getMappingVersion(); final PutMappingRequest request = new PutMappingRequest(); request.indices("test"); - request.type("type"); request.source("field", "type=text"); client().admin().indices().putMapping(request).actionGet(); // the version should be unchanged after putting the same mapping again diff --git a/server/src/test/java/org/opensearch/index/query/GeoShapeQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/GeoShapeQueryBuilderTests.java index 3eab92d7e2112..05fee1c043557 100644 --- a/server/src/test/java/org/opensearch/index/query/GeoShapeQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/GeoShapeQueryBuilderTests.java @@ -69,7 +69,6 @@ public abstract class GeoShapeQueryBuilderTests extends AbstractQueryTestCase { protected static String indexedShapeId; - protected static String indexedShapeType; protected static String indexedShapePath; protected static String indexedShapeIndex; protected static String indexedShapeRouting; @@ -119,7 +118,6 @@ protected GetResponse executeGet(GetRequest getRequest) { public void clearShapeFields() { indexedShapeToReturn = null; indexedShapeId = null; - indexedShapeType = null; indexedShapePath = null; indexedShapeIndex = null; indexedShapeRouting = null; @@ -145,10 +143,7 @@ public void testNoShape() throws IOException { } public void testNoIndexedShape() throws IOException { - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> new GeoShapeQueryBuilder(fieldName(), null, "type") - ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(fieldName(), null, null)); assertEquals("either shape or indexedShapeId is required", e.getMessage()); } @@ -259,11 +254,6 @@ public void testSerializationFailsUnlessFetched() throws IOException { protected QueryBuilder parseQuery(XContentParser 
parser) throws IOException { QueryBuilder query = super.parseQuery(parser); assertThat(query, instanceOf(GeoShapeQueryBuilder.class)); - - GeoShapeQueryBuilder shapeQuery = (GeoShapeQueryBuilder) query; - if (shapeQuery.indexedShapeType() != null) { - assertWarnings(GeoShapeQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } return query; } } diff --git a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java index c4aba907f4f40..bf42aca156805 100644 --- a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java @@ -390,13 +390,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws "_doc", new CompressedXContent( Strings.toString( - PutMappingRequest.buildFromSimplifiedDef( - "_doc", - "string_boost", - "type=text", - "string_no_pos", - "type=text,index_options=docs" - ) + PutMappingRequest.buildFromSimplifiedDef("string_boost", "type=text", "string_no_pos", "type=text,index_options=docs") ) ), MapperService.MergeReason.MAPPING_UPDATE diff --git a/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java index 8cc24a658025a..b95d9f8d36ad8 100644 --- a/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java @@ -78,7 +78,6 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws new CompressedXContent( Strings.toString( PutMappingRequest.buildFromSimplifiedDef( - "_doc", TEXT_FIELD_NAME, "type=text", INT_FIELD_NAME, diff --git a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java index d2aa512a43ed3..8eaeaa17f7bb5 100644 --- a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java @@ -1075,7 +1075,7 @@ public void testDisabledFieldNamesField() throws Exception { .merge( "_doc", new CompressedXContent( - Strings.toString(PutMappingRequest.buildFromSimplifiedDef("_doc", "foo", "type=text", "_field_names", "enabled=false")) + Strings.toString(PutMappingRequest.buildFromSimplifiedDef("foo", "type=text", "_field_names", "enabled=false")) ), MapperService.MergeReason.MAPPING_UPDATE ); @@ -1091,9 +1091,7 @@ public void testDisabledFieldNamesField() throws Exception { .merge( "_doc", new CompressedXContent( - Strings.toString( - PutMappingRequest.buildFromSimplifiedDef("_doc", "foo", "type=text", "_field_names", "enabled=true") - ) + Strings.toString(PutMappingRequest.buildFromSimplifiedDef("foo", "type=text", "_field_names", "enabled=true")) ), MapperService.MergeReason.MAPPING_UPDATE ); diff --git a/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java index f81938ff9df9b..c6cd667338303 100644 --- a/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java @@ -93,7 +93,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws String docType = "_doc"; mapperService.merge( docType, - new 
CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType, "m_s_m", "type=long"))), + new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("m_s_m", "type=long"))), MapperService.MergeReason.MAPPING_UPDATE ); } diff --git a/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java b/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java index 62557da2adb62..2bfcec1bf786c 100644 --- a/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java +++ b/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java @@ -88,6 +88,7 @@ import java.util.concurrent.ExecutionException; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.endsWith; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsEqual.equalTo; import static org.hamcrest.core.IsNot.not; @@ -283,7 +284,8 @@ protected boolean sortRequiresCustomComparator() { 0, GaussDecayFunctionBuilder.GAUSS_DECAY_FUNCTION, new IndexNumericFieldDataStub(), - MultiValueMode.MAX + MultiValueMode.MAX, + null ); private static final ScoreFunction EXP_DECAY_FUNCTION = new DecayFunctionBuilder.NumericFieldDataScoreFunction( 0, @@ -292,7 +294,8 @@ protected boolean sortRequiresCustomComparator() { 0, ExponentialDecayFunctionBuilder.EXP_DECAY_FUNCTION, new IndexNumericFieldDataStub(), - MultiValueMode.MAX + MultiValueMode.MAX, + null ); private static final ScoreFunction LIN_DECAY_FUNCTION = new DecayFunctionBuilder.NumericFieldDataScoreFunction( 0, @@ -301,7 +304,48 @@ protected boolean sortRequiresCustomComparator() { 0, LinearDecayFunctionBuilder.LINEAR_DECAY_FUNCTION, new IndexNumericFieldDataStub(), - MultiValueMode.MAX + MultiValueMode.MAX, + null + ); + + private static final ScoreFunction RANDOM_SCORE_FUNCTION_NAMED = new RandomScoreFunction(0, 0, new IndexFieldDataStub(), "func1"); + private static final ScoreFunction FIELD_VALUE_FACTOR_FUNCTION_NAMED = new FieldValueFactorFunction( + "test", + 1, + FieldValueFactorFunction.Modifier.LN, + 1.0, + null, + "func1" + ); + private static final ScoreFunction GAUSS_DECAY_FUNCTION_NAMED = new DecayFunctionBuilder.NumericFieldDataScoreFunction( + 0, + 1, + 0.1, + 0, + GaussDecayFunctionBuilder.GAUSS_DECAY_FUNCTION, + new IndexNumericFieldDataStub(), + MultiValueMode.MAX, + "func1" + ); + private static final ScoreFunction EXP_DECAY_FUNCTION_NAMED = new DecayFunctionBuilder.NumericFieldDataScoreFunction( + 0, + 1, + 0.1, + 0, + ExponentialDecayFunctionBuilder.EXP_DECAY_FUNCTION, + new IndexNumericFieldDataStub(), + MultiValueMode.MAX, + "func1" + ); + private static final ScoreFunction LIN_DECAY_FUNCTION_NAMED = new DecayFunctionBuilder.NumericFieldDataScoreFunction( + 0, + 1, + 0.1, + 0, + LinearDecayFunctionBuilder.LINEAR_DECAY_FUNCTION, + new IndexNumericFieldDataStub(), + MultiValueMode.MAX, + "func1" ); private static final ScoreFunction WEIGHT_FACTOR_FUNCTION = new WeightFactorFunction(4); private static final String TEXT = "The way out is through."; @@ -383,6 +427,58 @@ public void testExplainFunctionScoreQuery() throws IOException { assertThat(functionExplanation.getDetails()[0].getDetails()[1].getDetails().length, equalTo(0)); } + public void testExplainFunctionScoreQueryWithName() throws IOException { + Explanation functionExplanation = getFunctionScoreExplanation(searcher, RANDOM_SCORE_FUNCTION_NAMED); + checkFunctionScoreExplanation(functionExplanation, "random score 
function (seed: 0, field: test, _name: func1)"); + assertThat(functionExplanation.getDetails()[0].getDetails().length, equalTo(0)); + + functionExplanation = getFunctionScoreExplanation(searcher, FIELD_VALUE_FACTOR_FUNCTION_NAMED); + checkFunctionScoreExplanation(functionExplanation, "field value function(_name: func1): ln(doc['test'].value?:1.0 * factor=1.0)"); + assertThat(functionExplanation.getDetails()[0].getDetails().length, equalTo(0)); + + functionExplanation = getFunctionScoreExplanation(searcher, GAUSS_DECAY_FUNCTION_NAMED); + checkFunctionScoreExplanation(functionExplanation, "Function for field test:"); + assertThat( + functionExplanation.getDetails()[0].getDetails()[0].toString(), + equalTo( + "0.1 = exp(-0.5*pow(MAX[Math.max(Math.abs" + + "(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)],2.0)/0.21714724095162594, _name: func1)\n" + ) + ); + assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails().length, equalTo(0)); + + functionExplanation = getFunctionScoreExplanation(searcher, EXP_DECAY_FUNCTION_NAMED); + checkFunctionScoreExplanation(functionExplanation, "Function for field test:"); + assertThat( + functionExplanation.getDetails()[0].getDetails()[0].toString(), + equalTo( + "0.1 = exp(- MAX[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)] * 2.3025850929940455, _name: func1)\n" + ) + ); + assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails().length, equalTo(0)); + + functionExplanation = getFunctionScoreExplanation(searcher, LIN_DECAY_FUNCTION_NAMED); + checkFunctionScoreExplanation(functionExplanation, "Function for field test:"); + assertThat( + functionExplanation.getDetails()[0].getDetails()[0].toString(), + equalTo( + "0.1 = max(0.0, ((1.1111111111111112" + + " - MAX[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)])/1.1111111111111112, _name: func1)\n" + ) + ); + assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails().length, equalTo(0)); + + functionExplanation = getFunctionScoreExplanation(searcher, new WeightFactorFunction(4, RANDOM_SCORE_FUNCTION_NAMED)); + checkFunctionScoreExplanation(functionExplanation, "product of:"); + assertThat( + functionExplanation.getDetails()[0].getDetails()[0].toString(), + endsWith("random score function (seed: 0, field: test, _name: func1)\n") + ); + assertThat(functionExplanation.getDetails()[0].getDetails()[1].toString(), equalTo("4.0 = weight\n")); + assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails().length, equalTo(0)); + assertThat(functionExplanation.getDetails()[0].getDetails()[1].getDetails().length, equalTo(0)); + } + public Explanation getFunctionScoreExplanation(IndexSearcher searcher, ScoreFunction scoreFunction) throws IOException { FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(new TermQuery(TERM), scoreFunction, CombineFunction.AVG, 0.0f, 100); Weight weight = searcher.createWeight(searcher.rewrite(functionScoreQuery), org.apache.lucene.search.ScoreMode.COMPLETE, 1f); diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java index ec40b1736b242..d73f3f81c8138 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java @@ -34,6 +34,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DirectoryReader; 
+import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.TermQuery; @@ -44,6 +45,7 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; +import org.junit.Assert; import org.opensearch.Assertions; import org.opensearch.OpenSearchException; import org.opensearch.Version; @@ -72,6 +74,7 @@ import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.lease.Releasable; @@ -108,6 +111,7 @@ import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperParsingException; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.ParseContext; import org.opensearch.index.mapper.ParsedDocument; import org.opensearch.index.mapper.SeqNoFieldMapper; @@ -141,7 +145,6 @@ import org.opensearch.test.VersionUtils; import org.opensearch.test.store.MockFSDirectoryFactory; import org.opensearch.threadpool.ThreadPool; -import org.junit.Assert; import java.io.IOException; import java.nio.charset.Charset; @@ -168,7 +171,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.LongFunction; @@ -179,12 +181,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; -import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; -import static org.opensearch.common.lucene.Lucene.cleanLuceneIndex; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; -import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; -import static org.opensearch.test.hamcrest.RegexMatcher.matches; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; @@ -204,6 +200,12 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.oneOf; import static org.hamcrest.Matchers.sameInstance; +import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; +import static org.opensearch.common.lucene.Lucene.cleanLuceneIndex; +import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.opensearch.test.hamcrest.RegexMatcher.matches; /** * Simple unit-test IndexShard related operations. 
@@ -3129,8 +3131,8 @@ public void testRecoverFromLocalShard() throws IOException { targetShard = newShard(targetRouting); targetShard.markAsRecovering("store", new RecoveryState(targetShard.routingEntry(), localNode, null)); - BiConsumer<String, Mapping> mappingConsumer = (type, mapping) -> { - assertNull(requestedMappingUpdates.put(type, mapping)); + Consumer<Mapping> mappingConsumer = mapping -> { + assertNull(requestedMappingUpdates.put(MapperService.SINGLE_MAPPING_NAME, mapping)); }; final IndexShard differentIndex = newShard(new ShardId("index_2", "index_2", 0), true); @@ -4126,11 +4128,11 @@ public InternalEngine recoverFromTranslog(TranslogRecoveryRunner translogRecover try { readyToSnapshotLatch.await(); shard.snapshotStoreMetadata(); - try (Engine.IndexCommitRef indexCommitRef = shard.acquireLastIndexCommit(randomBoolean())) { - shard.store().getMetadata(indexCommitRef.getIndexCommit()); + try (GatedCloseable<IndexCommit> wrappedIndexCommit = shard.acquireLastIndexCommit(randomBoolean())) { + shard.store().getMetadata(wrappedIndexCommit.get()); } - try (Engine.IndexCommitRef indexCommitRef = shard.acquireSafeIndexCommit()) { - shard.store().getMetadata(indexCommitRef.getIndexCommit()); + try (GatedCloseable<IndexCommit> wrappedSafeCommit = shard.acquireSafeIndexCommit()) { + shard.store().getMetadata(wrappedSafeCommit.get()); } } catch (InterruptedException | IOException e) { throw new AssertionError(e); diff --git a/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java b/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java index c81a773ecdb10..f35911c8a3553 100644 --- a/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java +++ b/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java @@ -72,7 +72,7 @@ public void testTook() throws Exception { createIndex("test", Settings.EMPTY, "type1", mapping); ensureGreen(); - client().prepareIndex("test", "type1", "0").setSource("field", "foo bar").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("0").setSource("field", "foo bar").setRefreshPolicy(IMMEDIATE).get(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); @@ -107,7 +107,7 @@ public void testDocFreqs() throws IOException { BulkRequestBuilder bulk = client().prepareBulk(); for (int i = 0; i < max; i++) { bulk.add( - client().prepareIndex("test", "_doc", Integer.toString(i)).setSource("text", "the quick brown fox jumped over the lazy dog") + client().prepareIndex("test").setId(Integer.toString(i)).setSource("text", "the quick brown fox jumped over the lazy dog") ); } bulk.get(); @@ -148,7 +148,7 @@ public void testWithIndexedPhrases() throws IOException { BulkRequestBuilder bulk = client().prepareBulk(); for (int i = 0; i < max; i++) { bulk.add( - client().prepareIndex("test", "_doc", Integer.toString(i)).setSource("text", "the quick brown fox jumped over the lazy dog") + client().prepareIndex("test").setId(Integer.toString(i)).setSource("text", "the quick brown fox jumped over the lazy dog") ); } bulk.get(); diff --git a/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java b/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java index e7b45f053e919..ff97b87708202 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java @@ -179,7 +179,7 @@ public void 
testCloseWhileOngoingRequest() throws Exception { .prepareCreate("test") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) ); - node.client().prepareIndex("test", "_doc", "1").setSource(Collections.emptyMap()).get(); + node.client().prepareIndex("test").setId("1").setSource(Collections.emptyMap()).get(); OpenSearchAssertions.assertAllSuccessful(node.client().admin().indices().prepareRefresh("test").get()); assertEquals(2, indicesService.indicesRefCount.refCount()); @@ -213,7 +213,7 @@ public void testCloseAfterRequestHasUsedQueryCache() throws Exception { .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) ) ); - node.client().prepareIndex("test", "_doc", "1").setSource(Collections.singletonMap("foo", 3L)).get(); + node.client().prepareIndex("test").setId("1").setSource(Collections.singletonMap("foo", 3L)).get(); OpenSearchAssertions.assertAllSuccessful(node.client().admin().indices().prepareRefresh("test").get()); assertEquals(2, indicesService.indicesRefCount.refCount()); @@ -256,7 +256,7 @@ public void testCloseWhileOngoingRequestUsesQueryCache() throws Exception { .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) ) ); - node.client().prepareIndex("test", "_doc", "1").setSource(Collections.singletonMap("foo", 3L)).get(); + node.client().prepareIndex("test").setId("1").setSource(Collections.singletonMap("foo", 3L)).get(); OpenSearchAssertions.assertAllSuccessful(node.client().admin().indices().prepareRefresh("test").get()); assertEquals(2, indicesService.indicesRefCount.refCount()); @@ -298,7 +298,7 @@ public void testCloseWhileOngoingRequestUsesRequestCache() throws Exception { .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) ) ); - node.client().prepareIndex("test", "_doc", "1").setSource(Collections.singletonMap("foo", 3L)).get(); + node.client().prepareIndex("test").setId("1").setSource(Collections.singletonMap("foo", 3L)).get(); OpenSearchAssertions.assertAllSuccessful(node.client().admin().indices().prepareRefresh("test").get()); assertEquals(2, indicesService.indicesRefCount.refCount()); diff --git a/server/src/test/java/org/opensearch/indices/IndicesServiceTests.java b/server/src/test/java/org/opensearch/indices/IndicesServiceTests.java index bcd0fa5d54c56..8dd156dfcd0d2 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesServiceTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesServiceTests.java @@ -272,7 +272,7 @@ public void testDeleteIndexStore() throws Exception { assertNull(meta.index("test")); test = createIndex("test"); - client().prepareIndex("test", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); client().admin().indices().prepareFlush("test").get(); assertHitCount(client().prepareSearch("test").get(), 1); IndexMetadata secondMetadata = clusterService.state().metadata().index("test"); diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java index 78a0f9721e5ad..b195984de64b5 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java @@ -46,6 +46,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import 
org.apache.lucene.util.SetOnce; +import org.junit.After; +import org.junit.Before; import org.opensearch.ExceptionsHelper; import org.opensearch.Version; import org.opensearch.action.ActionListener; @@ -59,6 +61,7 @@ import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.FileSystemUtils; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lucene.store.IndexOutputOutputStream; @@ -93,14 +96,12 @@ import org.opensearch.index.translog.Translog; import org.opensearch.test.CorruptionUtils; import org.opensearch.test.DummyShardLock; -import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.IndexSettingsModule; +import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.VersionUtils; import org.opensearch.threadpool.FixedExecutorBuilder; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; -import org.junit.After; -import org.junit.Before; import java.io.IOException; import java.io.OutputStream; @@ -654,7 +655,7 @@ public void testThrowExceptionOnPrimaryRelocatedBeforePhase1Started() throws IOE when(shard.seqNoStats()).thenReturn(mock(SeqNoStats.class)); when(shard.segmentStats(anyBoolean(), anyBoolean())).thenReturn(mock(SegmentsStats.class)); when(shard.isRelocatedPrimary()).thenReturn(true); - when(shard.acquireSafeIndexCommit()).thenReturn(mock(Engine.IndexCommitRef.class)); + when(shard.acquireSafeIndexCommit()).thenReturn(mock(GatedCloseable.class)); doAnswer(invocation -> { ((ActionListener) invocation.getArguments()[0]).onResponse(() -> {}); return null; diff --git a/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java b/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java index 69923e4390ead..6a08f5115d1e2 100644 --- a/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java +++ b/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java @@ -69,10 +69,10 @@ public void testLastAccessTimeUpdate() throws Exception { final RecoveriesCollection collection = new RecoveriesCollection(logger, threadPool); final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); try (RecoveriesCollection.RecoveryRef status = collection.getRecovery(recoveryId)) { - final long lastSeenTime = status.target().lastAccessTime(); + final long lastSeenTime = status.get().lastAccessTime(); assertBusy(() -> { try (RecoveriesCollection.RecoveryRef currentStatus = collection.getRecovery(recoveryId)) { - assertThat("access time failed to update", lastSeenTime, lessThan(currentStatus.target().lastAccessTime())); + assertThat("access time failed to update", lastSeenTime, lessThan(currentStatus.get().lastAccessTime())); } }); } finally { @@ -120,7 +120,7 @@ public void testRecoveryCancellation() throws Exception { final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); final long recoveryId2 = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); try (RecoveriesCollection.RecoveryRef recoveryRef = collection.getRecovery(recoveryId)) { - ShardId shardId = recoveryRef.target().shardId(); + ShardId shardId = recoveryRef.get().shardId(); assertTrue("failed to cancel recoveries", collection.cancelRecoveriesForShard(shardId, "test")); assertThat("all recoveries should be cancelled", 
collection.size(), equalTo(0)); } finally { @@ -160,8 +160,8 @@ public void testResetRecovery() throws Exception { assertEquals(currentAsTarget, shard.recoveryStats().currentAsTarget()); try (RecoveriesCollection.RecoveryRef newRecoveryRef = collection.getRecovery(resetRecoveryId)) { shards.recoverReplica(shard, (s, n) -> { - assertSame(s, newRecoveryRef.target().indexShard()); - return newRecoveryRef.target(); + assertSame(s, newRecoveryRef.get().indexShard()); + return newRecoveryRef.get(); }, false); } shards.assertAllEqual(numDocs); diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java index e2a5a26850f73..94813d1f7cd33 100644 --- a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -126,7 +126,7 @@ public void testRetrieveSnapshots() throws Exception { int numDocs = randomIntBetween(10, 20); for (int i = 0; i < numDocs; i++) { String id = Integer.toString(i); - client().prepareIndex(indexName, "type1", id).setSource("text", "sometext").get(); + client().prepareIndex(indexName).setId(id).setSource("text", "sometext").get(); } client().admin().indices().prepareFlush(indexName).get(); diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java index 2400a59df6021..707210abad948 100644 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java @@ -32,48 +32,16 @@ package org.opensearch.rest.action.admin.indices; -import org.opensearch.client.node.NodeClient; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.rest.RestRequest; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; -import java.util.HashMap; import java.util.Map; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.mockito.Mockito.mock; - -public class RestCreateIndexActionTests extends RestActionTestCase { - private RestCreateIndexAction action; - - @Before - public void setupAction() { - action = new RestCreateIndexAction(); - controller().registerHandler(action); - } - - public void testIncludeTypeName() throws IOException { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withPath("/some_index") - .withParams(params) - .build(); - - action.prepareRequest(deprecatedRequest, mock(NodeClient.class)); - assertWarnings(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withPath("/some_index") - .build(); - action.prepareRequest(validRequest, mock(NodeClient.class)); - } +public class RestCreateIndexActionTests 
extends OpenSearchTestCase { public void testPrepareTypelessRequest() throws IOException { XContentBuilder content = XContentFactory.jsonBuilder() @@ -95,8 +63,7 @@ public void testPrepareTypelessRequest() throws IOException { .endObject(); Map contentAsMap = XContentHelper.convertToMap(BytesReference.bytes(content), true, content.contentType()).v2(); - boolean includeTypeName = false; - Map source = RestCreateIndexAction.prepareMappings(contentAsMap, includeTypeName); + Map source = RestCreateIndexAction.prepareMappings(contentAsMap); XContentBuilder expectedContent = XContentFactory.jsonBuilder() .startObject() @@ -126,34 +93,6 @@ public void testPrepareTypelessRequest() throws IOException { assertEquals(expectedContentAsMap, source); } - public void testPrepareTypedRequest() throws IOException { - XContentBuilder content = XContentFactory.jsonBuilder() - .startObject() - .startObject("mappings") - .startObject("type") - .startObject("properties") - .startObject("field1") - .field("type", "keyword") - .endObject() - .startObject("field2") - .field("type", "text") - .endObject() - .endObject() - .endObject() - .endObject() - .startObject("aliases") - .startObject("read_alias") - .endObject() - .endObject() - .endObject(); - - Map contentAsMap = XContentHelper.convertToMap(BytesReference.bytes(content), true, content.contentType()).v2(); - boolean includeTypeName = true; - Map source = RestCreateIndexAction.prepareMappings(contentAsMap, includeTypeName); - - assertEquals(contentAsMap, source); - } - public void testMalformedMappings() throws IOException { XContentBuilder content = XContentFactory.jsonBuilder() .startObject() @@ -166,8 +105,7 @@ public void testMalformedMappings() throws IOException { Map contentAsMap = XContentHelper.convertToMap(BytesReference.bytes(content), true, content.contentType()).v2(); - boolean includeTypeName = false; - Map source = RestCreateIndexAction.prepareMappings(contentAsMap, includeTypeName); + Map source = RestCreateIndexAction.prepareMappings(contentAsMap); assertEquals(contentAsMap, source); } } diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetIndicesActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetIndicesActionTests.java deleted file mode 100644 index 374b2cb0e8636..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetIndicesActionTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. 
See - * GitHub history for details. - */ - -package org.opensearch.rest.action.admin.indices; - -import org.opensearch.client.node.NodeClient; -import org.opensearch.rest.RestRequest; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.mockito.Mockito.mock; - -public class RestGetIndicesActionTests extends RestActionTestCase { - - /** - * Test that setting the "include_type_name" parameter raises a warning for the GET request - */ - public void testIncludeTypeNamesWarning() throws IOException { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index") - .withParams(params) - .build(); - - RestGetIndicesAction handler = new RestGetIndicesAction(); - handler.prepareRequest(request, mock(NodeClient.class)); - assertWarnings(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE); - - // the same request without the parameter should pass without warning - request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET).withPath("/some_index").build(); - handler.prepareRequest(request, mock(NodeClient.class)); - } - - /** - * Test that setting the "include_type_name" parameter doesn't raises a warning if the HEAD method is used (indices.exists) - */ - public void testIncludeTypeNamesWarningExists() throws IOException { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.HEAD) - .withPath("/some_index") - .withParams(params) - .build(); - - RestGetIndicesAction handler = new RestGetIndicesAction(); - handler.prepareRequest(request, mock(NodeClient.class)); - } -} diff --git a/server/src/test/java/org/opensearch/search/SearchServiceTests.java b/server/src/test/java/org/opensearch/search/SearchServiceTests.java index 7ba31d158f3c5..4e342875e4599 100644 --- a/server/src/test/java/org/opensearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/opensearch/search/SearchServiceTests.java @@ -222,7 +222,7 @@ protected Settings nodeSettings() { public void testClearOnClose() { createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); assertThat(searchResponse.getScrollId(), is(notNullValue())); SearchService service = getInstanceFromNode(SearchService.class); @@ -234,7 +234,7 @@ public void testClearOnClose() { public void testClearOnStop() { createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); assertThat(searchResponse.getScrollId(), is(notNullValue())); SearchService service = getInstanceFromNode(SearchService.class); @@ -246,7 +246,7 @@ public void 
testClearOnStop() { public void testClearIndexDelete() { createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); assertThat(searchResponse.getScrollId(), is(notNullValue())); SearchService service = getInstanceFromNode(SearchService.class); @@ -259,7 +259,7 @@ public void testClearIndexDelete() { public void testCloseSearchContextOnRewriteException() { // if refresh happens while checking the exception, the subsequent reference count might not match, so we switch it off createIndex("index", Settings.builder().put("index.refresh_interval", -1).build()); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchService service = getInstanceFromNode(SearchService.class); IndicesService indicesService = getInstanceFromNode(IndicesService.class); @@ -278,7 +278,7 @@ public void testCloseSearchContextOnRewriteException() { public void testSearchWhileIndexDeleted() throws InterruptedException { createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchService service = getInstanceFromNode(SearchService.class); IndicesService indicesService = getInstanceFromNode(IndicesService.class); @@ -302,7 +302,7 @@ public void run() { } catch (InterruptedException e) { throw new AssertionError(e); } - client().prepareIndex("index", "type") + client().prepareIndex("index") .setSource("field", "value") .setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())) .execute(new ActionListener() { @@ -387,7 +387,7 @@ public void onFailure(Exception e) { public void testSearchWhileIndexDeletedDoesNotLeakSearchContext() throws ExecutionException, InterruptedException { createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService indexService = indicesService.indexServiceSafe(resolveIndex("index")); @@ -633,7 +633,7 @@ public void testIgnoreScriptfieldIfSizeZero() throws IOException { */ public void testMaxOpenScrollContexts() throws Exception { createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); final SearchService service = getInstanceFromNode(SearchService.class); final IndicesService indicesService = getInstanceFromNode(IndicesService.class); @@ -958,7 +958,7 @@ public void testCanMatch() throws Exception { ).canMatch() ); // the source can match and can be rewritten to a match_none, but not the alias filter - final IndexResponse response = client().prepareIndex("index", "_doc", "1").setSource("id", "1").get(); + final IndexResponse response = client().prepareIndex("index").setId("1").setSource("id", "1").get(); assertEquals(RestStatus.CREATED, 
response.status()); searchRequest.indices("alias").source(new SearchSourceBuilder().query(new TermQueryBuilder("id", "1"))); assertFalse( @@ -1050,7 +1050,7 @@ public void testSetSearchThrottled() { final SearchService service = getInstanceFromNode(SearchService.class); Index index = resolveIndex("throttled_threadpool_index"); assertTrue(service.getIndicesService().indexServiceSafe(index).getIndexSettings().isSearchThrottled()); - client().prepareIndex("throttled_threadpool_index", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("throttled_threadpool_index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("throttled_threadpool_index") .setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED) .setSize(1) @@ -1104,7 +1104,7 @@ public void testExpandSearchThrottled() { ) ).actionGet(); - client().prepareIndex("throttled_threadpool_index", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("throttled_threadpool_index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); assertHitCount(client().prepareSearch().get(), 1L); assertHitCount(client().prepareSearch().setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED).get(), 1L); } @@ -1116,7 +1116,7 @@ public void testExpandSearchFrozen() { new InternalOrPrivateSettingsPlugin.UpdateInternalOrPrivateAction.Request("frozen_index", "index.frozen", "true") ).actionGet(); - client().prepareIndex("frozen_index", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("frozen_index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); assertHitCount(client().prepareSearch().get(), 0L); assertHitCount(client().prepareSearch().setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED).get(), 1L); } @@ -1315,7 +1315,7 @@ public void testDeleteIndexWhileSearch() throws Exception { createIndex("test"); int numDocs = randomIntBetween(1, 20); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "_doc").setSource("f", "v").get(); + client().prepareIndex("test").setSource("f", "v").get(); } client().admin().indices().prepareRefresh("test").get(); AtomicBoolean stopped = new AtomicBoolean(false); diff --git a/server/src/test/java/org/opensearch/search/aggregations/AggregationCollectorTests.java b/server/src/test/java/org/opensearch/search/aggregations/AggregationCollectorTests.java index c392e588319af..c48e99e16db2e 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/AggregationCollectorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/AggregationCollectorTests.java @@ -44,7 +44,7 @@ public class AggregationCollectorTests extends OpenSearchSingleNodeTestCase { public void testNeedsScores() throws Exception { IndexService index = createIndex("idx"); - client().prepareIndex("idx", "type", "1").setSource("f", 5).execute().get(); + client().prepareIndex("idx").setId("1").setSource("f", 5).execute().get(); client().admin().indices().prepareRefresh("idx").get(); // simple field aggregation, no scores needed diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java index 24022a6f41a09..bf16f70d400fc 100644 --- 
a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -115,7 +115,7 @@ protected void indexData() throws Exception { protected List indexDoc(String shard, String key, int times) throws Exception { IndexRequestBuilder[] builders = new IndexRequestBuilder[times]; for (int i = 0; i < times; i++) { - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") .setRouting(shard) .setSource(jsonBuilder().startObject().field("key", key).field("value", 1).endObject()); } diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java index 1446feefb2c7a..9243a1ccd517e 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -137,7 +137,7 @@ public void setupSuiteScopeCluster() throws Exception { multiVal[0] = multiValues[i % numUniqueGeoPoints]; multiVal[1] = multiValues[(i + 1) % numUniqueGeoPoints]; builders.add( - client().prepareIndex(IDX_NAME, "type") + client().prepareIndex(IDX_NAME) .setSource( jsonBuilder().startObject() .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat()) @@ -193,7 +193,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < 5; i++) { builders.add( - client().prepareIndex(DATELINE_IDX_NAME, "type") + client().prepareIndex(DATELINE_IDX_NAME) .setSource( jsonBuilder().startObject() .array(SINGLE_VALUED_FIELD_NAME, geoValues[i].lon(), geoValues[i].lat()) @@ -221,7 +221,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < 2000; i++) { singleVal = singleValues[i % numUniqueGeoPoints]; builders.add( - client().prepareIndex(HIGH_CARD_IDX_NAME, "type") + client().prepareIndex(HIGH_CARD_IDX_NAME) .setSource( jsonBuilder().startObject() .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat()) @@ -244,7 +244,7 @@ public void setupSuiteScopeCluster() throws Exception { } builders.add( - client().prepareIndex(IDX_ZERO_NAME, "type") + client().prepareIndex(IDX_ZERO_NAME) .setSource(jsonBuilder().startObject().array(SINGLE_VALUED_FIELD_NAME, 0.0, 1.0).endObject()) ); assertAcked(prepareCreate(IDX_ZERO_NAME).addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point")); diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java b/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java index c28f980360a40..f866d817a7c43 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java @@ -49,7 +49,7 @@ public class ValuesSourceConfigTests extends OpenSearchSingleNodeTestCase { public void testKeyword() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "bytes", "type=keyword"); - client().prepareIndex("index", "type", "1").setSource("bytes", "abc").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("bytes", "abc").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { 
QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -75,7 +75,7 @@ public void testKeyword() throws Exception { public void testEmptyKeyword() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "bytes", "type=keyword"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -106,7 +106,7 @@ public void testEmptyKeyword() throws Exception { public void testUnmappedKeyword() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -136,7 +136,7 @@ public void testUnmappedKeyword() throws Exception { public void testLong() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "long", "type=long"); - client().prepareIndex("index", "type", "1").setSource("long", 42).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("long", 42).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -162,7 +162,7 @@ public void testLong() throws Exception { public void testEmptyLong() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "long", "type=long"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -193,7 +193,7 @@ public void testEmptyLong() throws Exception { public void testUnmappedLong() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -224,7 +224,7 @@ public void testUnmappedLong() throws Exception { public void testBoolean() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "bool", "type=boolean"); - client().prepareIndex("index", "type", "1").setSource("bool", true).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + 
client().prepareIndex("index").setId("1").setSource("bool", true).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -250,7 +250,7 @@ public void testBoolean() throws Exception { public void testEmptyBoolean() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "bool", "type=boolean"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -281,7 +281,7 @@ public void testEmptyBoolean() throws Exception { public void testUnmappedBoolean() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -331,10 +331,7 @@ public void testTypeFieldDeprecation() { public void testFieldAlias() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "field", "type=keyword", "alias", "type=alias,path=field"); - client().prepareIndex("index", "type", "1") - .setSource("field", "value") - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); diff --git a/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java index 59519226685e2..66b18bc42ad50 100644 --- a/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java @@ -118,6 +118,46 @@ public void testSerialization() throws IOException { } } + public void testSerializationWithPercentilesQueryObject() throws IOException { + String restContent = "{\n" + + " \"aggregations\": {" + + " \"percentiles_duration\": {\n" + + " \"percentiles\" : {\n" + + " \"field\": \"duration\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; + String expectedContent = "{\"aggregations\":{" + + "\"percentiles_duration\":{" + + "\"percentiles\":{" + + "\"field\":\"duration\"," + + "\"percents\":[1.0,5.0,25.0,50.0,75.0,95.0,99.0]," + + "\"keyed\":true," + + "\"tdigest\":{" + + "\"compression\":100.0" + + "}" + + "}" + + "}" + + "}}"; + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser); + + try (BytesStreamOutput output = new BytesStreamOutput()) { + 
searchSourceBuilder.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { + SearchSourceBuilder deserializedBuilder = new SearchSourceBuilder(in); + String actualContent = deserializedBuilder.toString(); + + assertEquals(expectedContent, actualContent); + assertEquals(searchSourceBuilder.hashCode(), deserializedBuilder.hashCode()); + assertNotSame(searchSourceBuilder, deserializedBuilder); + } + } + } + } + public void testShallowCopy() { for (int i = 0; i < 10; i++) { SearchSourceBuilder original = createSearchSourceBuilder(); diff --git a/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java index e6280e5c6924a..b46ac4a72952b 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java @@ -85,7 +85,7 @@ public void testNullShape() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("aNullshape") .setSource("{\"geo\": null}", XContentType.JSON) .setRefreshPolicy(IMMEDIATE) @@ -99,13 +99,13 @@ public void testIndexPointsFilterRectangle() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field("name", "Document 1").field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field("name", "Document 2").field(defaultGeoFieldName, "POINT(-45 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -137,13 +137,13 @@ public void testIndexPointsCircle() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field("name", "Document 1").field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field("name", "Document 2").field(defaultGeoFieldName, "POINT(-45 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -170,13 +170,13 @@ public void testIndexPointsPolygon() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-45 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -206,19 +206,19 @@ public void testIndexPointsMultiPolygon() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); 
ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field("name", "Document 1").field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field("name", "Document 2").field(defaultGeoFieldName, "POINT(-40 -40)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("3") .setSource(jsonBuilder().startObject().field("name", "Document 3").field(defaultGeoFieldName, "POINT(-50 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -260,13 +260,13 @@ public void testIndexPointsRectangle() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field("name", "Document 1").field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field("name", "Document 2").field(defaultGeoFieldName, "POINT(-45 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -288,13 +288,13 @@ public void testIndexPointsIndexedRectangle() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping(defaultIndexName, xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("point1") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("point2") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-45 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -313,13 +313,13 @@ public void testIndexPointsIndexedRectangle() throws Exception { client().admin().indices().prepareCreate(indexedShapeIndex).addMapping(defaultIndexName, xcb).get(); ensureGreen(); - client().prepareIndex(indexedShapeIndex, "_doc") + client().prepareIndex(indexedShapeIndex) .setId("shape1") .setSource(jsonBuilder().startObject().field(indexedShapePath, "BBOX(-50, -40, -45, -55)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(indexedShapeIndex, "_doc") + client().prepareIndex(indexedShapeIndex) .setId("shape2") .setSource(jsonBuilder().startObject().field(indexedShapePath, "BBOX(-60, -50, -50, -60)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -355,19 +355,19 @@ public void testRectangleSpanningDateline() throws Exception { client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-169 0)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-179 0)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - 
client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("3") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(171 0)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -388,25 +388,25 @@ public void testPolygonSpanningDateline() throws Exception { client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-169 7)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-179 7)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("3") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(179 7)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("4") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(171 7)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -432,19 +432,19 @@ public void testMultiPolygonSpanningDateline() throws Exception { client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-169 7)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-179 7)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("3") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(171 7)").endObject()) .setRefreshPolicy(IMMEDIATE) diff --git a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java index cc3827168b82a..eb8cc7e6113e1 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java @@ -152,14 +152,16 @@ public void testShapeFetchingPath() throws Exception { String location = "\"geo\" : {\"type\":\"polygon\", \"coordinates\":[[[-10,-10],[10,-10],[10,10],[-10,10],[-10,-10]]]}"; - client().prepareIndex("shapes", "type", "1") + client().prepareIndex("shapes") + .setId("1") .setSource( String.format(Locale.ROOT, "{ %s, \"1\" : { %s, \"2\" : { %s, \"3\" : { %s } }} }", location, location, location, location), XContentType.JSON ) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startObject("geo") @@ -264,7 +266,7 @@ public void testRandomGeoCollectionQuery() throws Exception { ensureGreen(); XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "_doc").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + 
client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); // Create a random geometry collection to query GeometryCollectionBuilder queryCollection = RandomShapeGenerator.createGeometryCollection(random()); @@ -442,7 +444,8 @@ public void testEdgeCases() throws Exception { client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).get(); ensureGreen(); - client().prepareIndex("test", "type1", "blakely") + client().prepareIndex("test") + .setId("blakely") .setSource( jsonBuilder().startObject() .field("name", "Blakely Island") @@ -493,7 +496,8 @@ public void testIndexedShapeReferenceSourceDisabled() throws Exception { EnvelopeBuilder shape = new EnvelopeBuilder(new Coordinate(-45, 45), new Coordinate(45, -45)); - client().prepareIndex("shapes", "shape_type", "Big_Rectangle") + client().prepareIndex("shapes") + .setId("Big_Rectangle") .setSource(jsonBuilder().startObject().field("shape", shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -546,7 +550,7 @@ public void testPointQuery() throws Exception { .actionGet(); } XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); GeoShapeQueryBuilder geoShapeQueryBuilder = QueryBuilders.geoShapeQuery("geo", pb); geoShapeQueryBuilder.relation(ShapeRelation.INTERSECTS); @@ -587,7 +591,7 @@ public void testContainsShapeQuery() throws Exception { } XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); // index the mbr of the collection EnvelopeBuilder env = new EnvelopeBuilder( @@ -595,7 +599,7 @@ public void testContainsShapeQuery() throws Exception { new Coordinate(mbr.getMaxX(), mbr.getMinY()) ); docSource = env.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type", "2").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("2").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); ShapeBuilder filterShape = (gcb.getShapeAt(randomIntBetween(0, gcb.numShapes() - 1))); GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("geo", filterShape).relation(ShapeRelation.CONTAINS); @@ -613,7 +617,7 @@ public void testExistsQuery() throws Exception { client().admin().indices().prepareCreate("test").addMapping("type", builder).execute().actionGet(); XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); ExistsQueryBuilder eqb = QueryBuilders.existsQuery("geo"); SearchResponse result = client().prepareSearch("test").setQuery(eqb).get(); @@ -644,7 +648,8 @@ public void testPointsOnly() throws Exception { ShapeBuilder shape = RandomShapeGenerator.createShape(random()); try { - client().prepareIndex("geo_points_only", "type1", "1") + client().prepareIndex("geo_points_only") + .setId("1") .setSource(jsonBuilder().startObject().field("location", 
shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -683,14 +688,16 @@ public void testPointsOnlyExplicit() throws Exception { // MULTIPOINT ShapeBuilder shape = RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.MULTIPOINT); - client().prepareIndex("geo_points_only", "type1", "1") + client().prepareIndex("geo_points_only") + .setId("1") .setSource(jsonBuilder().startObject().field("geo", shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); // POINT shape = RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.POINT); - client().prepareIndex("geo_points_only", "type1", "2") + client().prepareIndex("geo_points_only") + .setId("2") .setSource(jsonBuilder().startObject().field("geo", shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -709,11 +716,13 @@ public void testIndexedShapeReference() throws Exception { EnvelopeBuilder shape = new EnvelopeBuilder(new Coordinate(-45, 45), new Coordinate(45, -45)); - client().prepareIndex("shapes", "shape_type", "Big_Rectangle") + client().prepareIndex("shapes") + .setId("Big_Rectangle") .setSource(jsonBuilder().startObject().field("shape", shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "Document 1") @@ -762,7 +771,8 @@ public void testFieldAlias() throws IOException { createIndex("test", Settings.EMPTY, "type", mapping); ShapeBuilder shape = RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.MULTIPOINT); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("location", shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -785,7 +795,7 @@ public void testQueryRandomGeoCollection() throws Exception { client().admin().indices().prepareCreate("test").addMapping("type", builder).get(); XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); ShapeBuilder filterShape = (gcb.getShapeAt(gcb.numShapes() - 1)); @@ -832,7 +842,7 @@ public void testShapeFilterWithDefinedGeoCollection() throws Exception { .endArray() .endObject() .endObject(); - client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery( "geo", @@ -946,7 +956,7 @@ public void testIndexRectangleSpanningDateLine() throws Exception { EnvelopeBuilder envelopeBuilder = new EnvelopeBuilder(new Coordinate(178, 10), new Coordinate(-178, -10)); XContentBuilder docSource = envelopeBuilder.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type1", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); ShapeBuilder filterShape = new PointBuilder(179, 0); diff --git a/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java b/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java index c80ce807bf736..e1002e114822e 100644 --- 
a/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java +++ b/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java @@ -110,6 +110,34 @@ public void testExplain() throws IOException { assertThat(explanation.getValue(), equalTo(1.0)); } + public void testExplainWithName() throws IOException { + Script script = new Script("script using explain"); + ScoreScript.LeafFactory factory = newFactory(script, true, explanation -> { + assertNotNull(explanation); + explanation.set("this explains the score"); + return 1.0; + }); + + ScriptScoreQuery query = new ScriptScoreQuery( + Queries.newMatchAllQuery(), + "query1", + script, + factory, + null, + "index", + 0, + Version.CURRENT + ); + Weight weight = query.createWeight(searcher, ScoreMode.COMPLETE, 1.0f); + Explanation explanation = weight.explain(leafReaderContext, 0); + assertNotNull(explanation); + assertThat(explanation.getDescription(), equalTo("this explains the score")); + assertThat(explanation.getValue(), equalTo(1.0)); + + assertThat(explanation.getDetails(), arrayWithSize(1)); + assertThat(explanation.getDetails()[0].getDescription(), equalTo("*:* (_name: query1)")); + } + public void testExplainDefault() throws IOException { Script script = new Script("script without setting explanation"); ScoreScript.LeafFactory factory = newFactory(script, true, explanation -> 1.5); diff --git a/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java b/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java index 8e33f09f7c3e9..c2c2728a348f5 100644 --- a/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java +++ b/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java @@ -104,13 +104,13 @@ public static void index01Docs(String type, String settings, OpenSearchIntegTest ); String[] gb = { "0", "1" }; List indexRequestBuilderList = new ArrayList<>(); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1").setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2").setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "5").setSource(TEXT_FIELD, gb, CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "6").setSource(TEXT_FIELD, gb, CLASS_FIELD, "0")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "7").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("1").setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("2").setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("3").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("4").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("5").setSource(TEXT_FIELD, gb, CLASS_FIELD, 
"1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("6").setSource(TEXT_FIELD, gb, CLASS_FIELD, "0")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("7").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); testCase.indexRandom(true, false, indexRequestBuilderList); } } diff --git a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-mapping.json b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-mapping.json index 70bf6dc7b5de0..557704b0bd4e3 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-mapping.json +++ b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-mapping.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "dynamic_templates":[ { "template_1":{ @@ -11,4 +11,4 @@ } ] } -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json index ad46106342639..8aa6d6ef8a613 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json +++ b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "dynamic_templates":[ { "template_1":{ diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping1.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping1.json index dbd74d33780d7..7828a4dbf587c 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping1.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping1.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name":{ "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping2.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping2.json index b4d1843928891..0d6274dd50d3a 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping2.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping2.json @@ -1,5 +1,5 @@ { - "person" :{ + "_doc" :{ "properties" :{ "name":{ "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping3.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping3.json index 459d9fc9b1eec..60a2751ede630 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping3.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping3.json @@ -1,5 +1,5 @@ { - "person" : { + "_doc" : { "properties" :{ "name" : { "type" : "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping4.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping4.json index 416633c4fc106..fe3fb35fc7def 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping4.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping4.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name":{ "type": "text", diff --git 
a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade1.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade1.json index b00ea46b56d61..acffa3100539e 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade1.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade1.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name":{ "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade2.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade2.json index 563567f463eff..8acb62e0a1f25 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade2.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade2.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name":{ "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade3.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade3.json index 5985ff316a772..c8552f41f8ca6 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade3.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade3.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name":{ "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/test-multi-fields.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/test-multi-fields.json index b7317aba3c148..9f9c18a30f8e6 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/test-multi-fields.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/test-multi-fields.json @@ -1,5 +1,5 @@ { - "person": { + "_doc": { "properties": { "name": { "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/path/test-mapping.json b/server/src/test/resources/org/opensearch/index/mapper/path/test-mapping.json index 8d7505624b1b8..e3a14f83b4743 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/path/test-mapping.json +++ b/server/src/test/resources/org/opensearch/index/mapper/path/test-mapping.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name1":{ "type":"object", diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java index f6b8ed8174743..9628b123eb4ec 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java @@ -61,6 +61,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; +import org.junit.After; +import org.junit.Before; import org.opensearch.Version; import org.opensearch.action.index.IndexRequest; import org.opensearch.action.support.replication.ReplicationResponse; @@ -74,6 +76,7 @@ import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.compress.CompressedXContent; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.Settings; @@ -113,12 +116,10 @@ import 
org.opensearch.indices.breaker.CircuitBreakerService; import org.opensearch.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.DummyShardLock; -import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.IndexSettingsModule; +import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; -import org.junit.After; -import org.junit.Before; import java.io.IOException; import java.nio.charset.Charset; @@ -143,14 +144,14 @@ import static java.util.Collections.emptyList; import static java.util.Collections.shuffle; -import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; -import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; -import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; -import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; +import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; +import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; +import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; +import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; public abstract class EngineTestCase extends OpenSearchTestCase { @@ -1393,8 +1394,8 @@ public static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine e final long retainedOps = engine.config().getIndexSettings().getSoftDeleteRetentionOperations(); final long seqNoForRecovery; if (engine.config().getIndexSettings().isSoftDeleteEnabled()) { - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { - seqNoForRecovery = Long.parseLong(safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1; + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { + seqNoForRecovery = Long.parseLong(wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1; } } else { seqNoForRecovery = engine.getMinRetainedSeqNo(); diff --git a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java index 5bb4ee5f29f16..dfc34add3863a 100644 --- a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java @@ -907,7 +907,7 @@ private void executeShardBulkOnPrimary( final PlainActionFuture permitAcquiredFuture = new PlainActionFuture<>(); primary.acquirePrimaryOperationPermit(permitAcquiredFuture, ThreadPool.Names.SAME, request); try (Releasable ignored = permitAcquiredFuture.actionGet()) { - MappingUpdatePerformer noopMappingUpdater = (update, shardId, type, listener1) -> {}; + MappingUpdatePerformer noopMappingUpdater = (update, shardId, listener1) -> {}; TransportShardBulkAction.performOnPrimary( request, primary, diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index cc0b2219377d5..ae5d7293feeb9 
100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -32,6 +32,7 @@ package org.opensearch.index.shard; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.store.Directory; import org.opensearch.Version; import org.opensearch.action.admin.indices.flush.FlushRequest; @@ -52,6 +53,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesArray; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; @@ -115,10 +117,10 @@ import java.util.function.Consumer; import java.util.stream.Collectors; -import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; /** * A base class for unit tests that need to create and shutdown {@link IndexShard} instances easily, @@ -1036,13 +1038,13 @@ protected String snapshotShard(final IndexShard shard, final Snapshot snapshot, ); final PlainActionFuture future = PlainActionFuture.newFuture(); final String shardGen; - try (Engine.IndexCommitRef indexCommitRef = shard.acquireLastIndexCommit(true)) { + try (GatedCloseable wrappedIndexCommit = shard.acquireLastIndexCommit(true)) { repository.snapshotShard( shard.store(), shard.mapperService(), snapshot.getSnapshotId(), indexId, - indexCommitRef.getIndexCommit(), + wrappedIndexCommit.get(), null, snapshotStatus, Version.CURRENT, diff --git a/test/framework/src/main/java/org/opensearch/repositories/AbstractThirdPartyRepositoryTestCase.java b/test/framework/src/main/java/org/opensearch/repositories/AbstractThirdPartyRepositoryTestCase.java index e0aa8cad3fdf2..8ddaba8067b2d 100644 --- a/test/framework/src/main/java/org/opensearch/repositories/AbstractThirdPartyRepositoryTestCase.java +++ b/test/framework/src/main/java/org/opensearch/repositories/AbstractThirdPartyRepositoryTestCase.java @@ -110,9 +110,9 @@ public void testCreateSnapshot() { logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test-idx-1", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - client().prepareIndex("test-idx-2", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - client().prepareIndex("test-idx-3", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-1").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-2").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-3").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); } client().admin().indices().prepareRefresh().get(); @@ -182,9 +182,9 @@ public void testCleanup() throws Exception { logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test-idx-1", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - client().prepareIndex("test-idx-2", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - client().prepareIndex("test-idx-3", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); + 
client().prepareIndex("test-idx-1").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-2").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-3").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); } client().admin().indices().prepareRefresh().get(); diff --git a/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchBlobStoreRepositoryIntegTestCase.java index ac7dfcb709c5f..7d9810a11e143 100644 --- a/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchBlobStoreRepositoryIntegTestCase.java @@ -326,7 +326,7 @@ public void testSnapshotAndRestore() throws Exception { logger.info("--> delete {} random documents from {}", deleteCount, index); for (int i = 0; i < deleteCount; i++) { int doc = randomIntBetween(0, docCount - 1); - client().prepareDelete(index, index, Integer.toString(doc)).get(); + client().prepareDelete(index, Integer.toString(doc)).get(); } client().admin().indices().prepareRefresh(index).get(); } @@ -379,7 +379,7 @@ public void testMultipleSnapshotAndRollback() throws Exception { logger.info("--> delete {} random documents from {}", deleteCount, indexName); for (int j = 0; j < deleteCount; j++) { int doc = randomIntBetween(0, docCount - 1); - client().prepareDelete(indexName, indexName, Integer.toString(doc)).get(); + client().prepareDelete(indexName, Integer.toString(doc)).get(); } client().admin().indices().prepareRefresh(indexName).get(); } @@ -493,7 +493,8 @@ public void testIndicesDeletedFromRepository() throws Exception { protected void addRandomDocuments(String name, int numDocs) throws InterruptedException { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - indexRequestBuilders[i] = client().prepareIndex(name, name, Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex(name) + .setId(Integer.toString(i)) .setRouting(randomAlphaOfLength(randomIntBetween(1, 10))) .setSource("field", "value"); } diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java index b08edf1028607..cbeefa7349e16 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java @@ -175,16 +175,16 @@ public abstract class AggregatorTestCase extends OpenSearchTestCase { protected ValuesSourceRegistry valuesSourceRegistry; // A list of field types that should not be tested, or are not currently supported - private static List TYPE_TEST_BLACKLIST; + private static List TYPE_TEST_DENYLIST; static { - List blacklist = new ArrayList<>(); - blacklist.add(ObjectMapper.CONTENT_TYPE); // Cannot aggregate objects - blacklist.add(GeoShapeFieldMapper.CONTENT_TYPE); // Cannot aggregate geoshapes (yet) - blacklist.add(ObjectMapper.NESTED_CONTENT_TYPE); // TODO support for nested - blacklist.add(CompletionFieldMapper.CONTENT_TYPE); // TODO support completion - blacklist.add(FieldAliasMapper.CONTENT_TYPE); // TODO support alias - TYPE_TEST_BLACKLIST = blacklist; + List denylist = new ArrayList<>(); + 
denylist.add(ObjectMapper.CONTENT_TYPE); // Cannot aggregate objects + denylist.add(GeoShapeFieldMapper.CONTENT_TYPE); // Cannot aggregate geoshapes (yet) + denylist.add(ObjectMapper.NESTED_CONTENT_TYPE); // TODO support for nested + denylist.add(CompletionFieldMapper.CONTENT_TYPE); // TODO support completion + denylist.add(FieldAliasMapper.CONTENT_TYPE); // TODO support alias + TYPE_TEST_DENYLIST = denylist; } /** @@ -713,11 +713,11 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldTy } /** - * A method that allows implementors to specifically blacklist particular field types (based on their content_name). + * A method that allows implementors to specifically denylist particular field types (based on their content_name). * This is needed in some areas where the ValuesSourceType is not granular enough, for example integer values * vs floating points, or `keyword` bytes vs `binary` bytes (which are not searchable) * - * This is a blacklist instead of a whitelist because there are vastly more field types than ValuesSourceTypes, + * This is a denylist instead of an allowlist because there are vastly more field types than ValuesSourceTypes, * and it's expected that these unsupported cases are exceptional rather than common */ protected List unsupportedMappedFieldTypes() { @@ -748,7 +748,7 @@ public void testSupportedFieldTypes() throws IOException { for (Map.Entry mappedType : mapperRegistry.getMapperParsers().entrySet()) { // Some field types should not be tested, or require more work and are not ready yet - if (TYPE_TEST_BLACKLIST.contains(mappedType.getKey())) { + if (TYPE_TEST_DENYLIST.contains(mappedType.getKey())) { continue; } diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java index 4113fa89487e2..495eb73e3f39a 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java @@ -53,7 +53,8 @@ public void setupSuiteScopeCluster() throws Exception { final int numDocs = 10; for (int i = 0; i < numDocs; i++) { // TODO randomize the size and the params in here? 
builders.add( - client().prepareIndex("idx", "type", String.valueOf(i)) + client().prepareIndex("idx") + .setId(String.valueOf(i)) .setSource( jsonBuilder().startObject() .field("value", i + 1) @@ -79,7 +80,8 @@ public void setupSuiteScopeCluster() throws Exception { builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", String.valueOf(i)) + client().prepareIndex("empty_bucket_idx") + .setId(String.valueOf(i)) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } diff --git a/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java index 3511a1d750738..3a55848c46150 100644 --- a/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -444,7 +444,7 @@ protected void indexRandomDocs(String index, int numdocs) throws InterruptedExce logger.info("--> indexing [{}] documents into [{}]", numdocs, index); IndexRequestBuilder[] builders = new IndexRequestBuilder[numdocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(index, "_doc").setId(Integer.toString(i)).setSource("field1", "bar " + i); + builders[i] = client().prepareIndex(index).setId(Integer.toString(i)).setSource("field1", "bar " + i); } indexRandom(true, builders); flushAndRefresh(index); diff --git a/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java index 5f585434a4e32..e5d14333de828 100644 --- a/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java @@ -439,7 +439,6 @@ public void onRemoval(ShardId shardId, Accountable accountable) { new CompressedXContent( Strings.toString( PutMappingRequest.buildFromSimplifiedDef( - "_doc", TEXT_FIELD_NAME, "type=text", KEYWORD_FIELD_NAME, diff --git a/test/framework/src/main/java/org/opensearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/opensearch/test/BackgroundIndexer.java index 62fdd2d7314c0..f7271a5fe8c20 100644 --- a/test/framework/src/main/java/org/opensearch/test/BackgroundIndexer.java +++ b/test/framework/src/main/java/org/opensearch/test/BackgroundIndexer.java @@ -170,10 +170,10 @@ public void run() { for (int i = 0; i < batchSize; i++) { id = idGenerator.incrementAndGet(); if (useAutoGeneratedIDs) { - bulkRequest.add(client.prepareIndex(index, type).setSource(generateSource(id, threadRandom))); + bulkRequest.add(client.prepareIndex(index).setSource(generateSource(id, threadRandom))); } else { bulkRequest.add( - client.prepareIndex(index, type, Long.toString(id)).setSource(generateSource(id, threadRandom)) + client.prepareIndex(index).setId(Long.toString(id)).setSource(generateSource(id, threadRandom)) ); } } @@ -201,7 +201,7 @@ public void run() { id = idGenerator.incrementAndGet(); if (useAutoGeneratedIDs) { try { - IndexResponse indexResponse = client.prepareIndex(index, type) + IndexResponse indexResponse = client.prepareIndex(index) .setTimeout(timeout) .setSource(generateSource(id, threadRandom)) .get(); @@ -214,7 +214,8 @@ public void run() { } } else { try { - IndexResponse indexResponse = client.prepareIndex(index, type, Long.toString(id)) + IndexResponse indexResponse = 
client.prepareIndex(index) + .setId(Long.toString(id)) .setTimeout(timeout) .setSource(generateSource(id, threadRandom)) .get(); diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index dbc6dd012daee..b9ee655dcc979 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -1096,7 +1096,7 @@ protected void ensureClusterStateConsistency() throws IOException { // remove local node reference masterClusterState = ClusterState.Builder.fromBytes(masterClusterStateBytes, null, namedWriteableRegistry); Map masterStateMap = convertToMap(masterClusterState); - int masterClusterStateSize = ClusterState.Builder.toBytes(masterClusterState).length; + int masterClusterStateSize = masterClusterState.toString().length(); String masterId = masterClusterState.nodes().getMasterNodeId(); for (Client client : cluster().getClients()) { ClusterState localClusterState = client.admin().cluster().prepareState().all().setLocal(true).get().getState(); @@ -1104,7 +1104,7 @@ protected void ensureClusterStateConsistency() throws IOException { // remove local node reference localClusterState = ClusterState.Builder.fromBytes(localClusterStateBytes, null, namedWriteableRegistry); final Map localStateMap = convertToMap(localClusterState); - final int localClusterStateSize = ClusterState.Builder.toBytes(localClusterState).length; + final int localClusterStateSize = localClusterState.toString().length(); // Check that the non-master node has the same version of the cluster state as the master and // that the master node matches the master (otherwise there is no requirement for the cluster state to match) if (masterClusterState.version() == localClusterState.version() @@ -1112,7 +1112,10 @@ protected void ensureClusterStateConsistency() throws IOException { try { assertEquals("cluster state UUID does not match", masterClusterState.stateUUID(), localClusterState.stateUUID()); // We cannot compare serialization bytes since serialization order of maps is not guaranteed - // but we can compare serialization sizes - they should be the same + // We also cannot compare byte array size because CompressedXContent's DeflateCompressor uses + // a synced flush that can affect the size of the compressed byte array + // (see: DeflateCompressedXContentTests#testDifferentCompressedRepresentation for an example) + // instead we compare the string length of cluster state - they should be the same assertEquals("cluster state size does not match", masterClusterStateSize, localClusterStateSize); // Compare JSON serialization assertNull( @@ -1331,8 +1334,9 @@ protected void ensureFullyConnectedCluster() { * client().prepareIndex(index, type).setSource(source).execute().actionGet(); * */ + @Deprecated protected final IndexResponse index(String index, String type, XContentBuilder source) { - return client().prepareIndex(index, type).setSource(source).execute().actionGet(); + return client().prepareIndex(index).setSource(source).execute().actionGet(); } /** @@ -1342,7 +1346,7 @@ protected final IndexResponse index(String index, String type, XContentBuilder s * */ protected final IndexResponse index(String index, String type, String id, Map source) { - return client().prepareIndex(index, type, id).setSource(source).execute().actionGet(); + return 
client().prepareIndex(index).setId(id).setSource(source).execute().actionGet(); } /** @@ -1351,8 +1355,9 @@ protected final IndexResponse index(String index, String type, String id, Map */ + @Deprecated protected final IndexResponse index(String index, String type, String id, XContentBuilder source) { - return client().prepareIndex(index, type, id).setSource(source).execute().actionGet(); + return client().prepareIndex(index).setId(id).setSource(source).execute().actionGet(); } /** @@ -1361,8 +1366,9 @@ protected final IndexResponse index(String index, String type, String id, XConte * return client().prepareIndex(index, type, id).setSource(source).execute().actionGet(); * */ + @Deprecated protected final IndexResponse index(String index, String type, String id, Object... source) { - return client().prepareIndex(index, type, id).setSource(source).execute().actionGet(); + return client().prepareIndex(index).setId(id).setSource(source).execute().actionGet(); } /** @@ -1373,8 +1379,9 @@ protected final IndexResponse index(String index, String type, String id, Object *

        * where source is a JSON String. */ + @Deprecated protected final IndexResponse index(String index, String type, String id, String source) { - return client().prepareIndex(index, type, id).setSource(source, XContentType.JSON).execute().actionGet(); + return client().prepareIndex(index).setId(id).setSource(source, XContentType.JSON).execute().actionGet(); } /** @@ -1605,7 +1612,7 @@ public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean ma assertEquals( "failed to delete a dummy doc [" + doc.get(0) + "][" + doc.get(1) + "]", DocWriteResponse.Result.DELETED, - client().prepareDelete(doc.get(0), null, doc.get(1)).setRouting(doc.get(1)).get().getResult() + client().prepareDelete(doc.get(0), doc.get(1)).setRouting(doc.get(1)).get().getResult() ); } } diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java index 960400019f7ea..83e59e1edd8c8 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java @@ -304,7 +304,9 @@ protected IndexService createIndex(String index, Settings settings) { /** * Create a new index on the singleton node with the provided index settings. + * @deprecated types are being removed */ + @Deprecated protected IndexService createIndex(String index, Settings settings, String type, XContentBuilder mappings) { CreateIndexRequestBuilder createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings); if (type != null && mappings != null) { @@ -315,7 +317,9 @@ protected IndexService createIndex(String index, Settings settings, String type, /** * Create a new index on the singleton node with the provided index settings. + * @deprecated types are being removed */ + @Deprecated protected IndexService createIndex(String index, Settings settings, String type, Object... mappings) { CreateIndexRequestBuilder createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings); if (type != null) { diff --git a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java index 9603b63337842..27369e79e5dee 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java @@ -968,10 +968,7 @@ protected static void createIndex(String name, Settings settings, String mapping entity += "}"; if (settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) { expectSoftDeletesWarning(request, name); - } else if (settings.hasValue(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey()) - || settings.hasValue(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey())) { - expectTranslogRetentionWarning(request); - } + } request.setJsonEntity(entity); client().performRequest(request); } @@ -1025,21 +1022,6 @@ protected static void expectSoftDeletesWarning(Request request, String indexName } } - protected static void expectTranslogRetentionWarning(Request request) { - final List expectedWarnings = Collections.singletonList( - "Translog retention settings [index.translog.retention.age] " - + "and [index.translog.retention.size] are deprecated and effectively ignored. They will be removed in a future version." 
- ); - final Builder requestOptions = RequestOptions.DEFAULT.toBuilder(); - if (nodeVersions.stream().allMatch(version -> version.onOrAfter(LegacyESVersion.V_7_7_0))) { - requestOptions.setWarningsHandler(warnings -> warnings.equals(expectedWarnings) == false); - request.setOptions(requestOptions); - } else if (nodeVersions.stream().anyMatch(version -> version.onOrAfter(LegacyESVersion.V_7_7_0))) { - requestOptions.setWarningsHandler(warnings -> warnings.isEmpty() == false && warnings.equals(expectedWarnings) == false); - request.setOptions(requestOptions); - } - } - protected static Map getIndexSettings(String index) throws IOException { Request request = new Request("GET", "/" + index + "/_settings"); request.addParameter("flat_settings", "true"); diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java index 8a2e9deb424aa..15510e368b1f5 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java @@ -34,7 +34,7 @@ import java.util.regex.Pattern; /** - * Matches blacklist patterns. + * Matches denylist patterns. * * Currently the following syntax is supported: * @@ -45,20 +45,20 @@ * indices.get/10_basic/advanced/allow_no_indices (contains an additional segment) * * - * Each blacklist pattern is a suffix match on the path. Empty patterns are not allowed. + * Each denylist pattern is a suffix match on the path. Empty patterns are not allowed. */ final class BlacklistedPathPatternMatcher { private final Pattern pattern; /** - * Constructs a new BlacklistedPathPatternMatcher instance from the provided suffix pattern. + * Constructs a new DenylistedPathPatternMatcher instance from the provided suffix pattern. * * @param p The suffix pattern. Must be a non-empty string. */ BlacklistedPathPatternMatcher(String p) { // guard against accidentally matching everything as an empty string lead to the pattern ".*" which matches everything if (p == null || p.trim().isEmpty()) { - throw new IllegalArgumentException("Empty blacklist patterns are not supported"); + throw new IllegalArgumentException("Empty denylist patterns are not supported"); } // very simple transformation from wildcard to a proper regex String finalPattern = p.replaceAll("\\*", "[^/]*") // support wildcard matches (within a single path segment) diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 9a611231a9fa0..4c3a1ec863d31 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -39,7 +39,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; -import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.client.NodeSelector; import org.opensearch.common.bytes.BytesReference; @@ -54,8 +53,6 @@ import java.util.List; import java.util.Map; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - /** * Execution context passed across the REST tests. * Holds the REST client used to communicate with opensearch. 
@@ -121,10 +118,6 @@ public ClientYamlTestResponse callApi( } } - if (esVersion().before(LegacyESVersion.V_7_0_0)) { - adaptRequestForOlderVersion(apiName, bodies, requestParams); - } - HttpEntity entity = createEntity(bodies, requestHeaders); try { response = callApiInternal(apiName, requestParams, entity, requestHeaders, nodeSelector); @@ -140,62 +133,6 @@ public ClientYamlTestResponse callApi( } } - /** - * To allow tests to run against a mixed 7.x/6.x cluster, we make certain modifications to the - * request related to types. - * - * Specifically, we generally use typeless index creation and document writes in test set-up code. - * This functionality is supported in 7.x, but is not supported in 6.x (or is not the default - * behavior). Here we modify the request so that it will work against a 6.x node. - */ - private void adaptRequestForOlderVersion(String apiName, List> bodies, Map requestParams) { - // For index creations, we specify 'include_type_name=false' if it is not explicitly set. This - // allows us to omit the parameter in the test description, while still being able to communicate - // with 6.x nodes where include_type_name defaults to 'true'. - if (apiName.equals("indices.create") && requestParams.containsKey(INCLUDE_TYPE_NAME_PARAMETER) == false) { - requestParams.put(INCLUDE_TYPE_NAME_PARAMETER, "false"); - } - - // We add the type to the document API requests if it's not already included. - if ((apiName.equals("index") || apiName.equals("update") || apiName.equals("delete") || apiName.equals("get")) - && requestParams.containsKey("type") == false) { - requestParams.put("type", "_doc"); - } - - // We also add the type to the bulk API requests if it's not already included. The type can either - // be on the request parameters or in the action metadata in the body of the request so we need to - // be sensitive to both scenarios. 
- if (apiName.equals("bulk") && requestParams.containsKey("type") == false) { - if (requestParams.containsKey("index")) { - requestParams.put("type", "_doc"); - } else { - for (int i = 0; i < bodies.size(); i++) { - Map body = bodies.get(i); - Map actionMetadata; - if (body.containsKey("index")) { - actionMetadata = (Map) body.get("index"); - i++; - } else if (body.containsKey("create")) { - actionMetadata = (Map) body.get("create"); - i++; - } else if (body.containsKey("update")) { - actionMetadata = (Map) body.get("update"); - i++; - } else if (body.containsKey("delete")) { - actionMetadata = (Map) body.get("delete"); - } else { - // action metadata is malformed so leave it malformed since - // the test is probably testing for malformed action metadata - continue; - } - if (actionMetadata.containsKey("_type") == false) { - actionMetadata.put("_type", "_doc"); - } - } - } - } - } - private HttpEntity createEntity(List> bodies, Map headers) throws IOException { if (bodies.isEmpty()) { return null; diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java index 375103d2c1d0f..ca2659e9523e6 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java @@ -88,13 +88,13 @@ public abstract class OpenSearchClientYamlSuiteTestCase extends OpenSearchRestTe */ public static final String REST_TESTS_SUITE = "tests.rest.suite"; /** - * Property that allows to blacklist some of the REST tests based on a comma separated list of globs + * Property that allows to denylist some of the REST tests based on a comma separated list of globs * e.g. "-Dtests.rest.blacklist=get/10_basic/*" */ public static final String REST_TESTS_BLACKLIST = "tests.rest.blacklist"; /** - * We use tests.rest.blacklist in build files to blacklist tests; this property enables a user to add additional blacklisted tests on - * top of the tests blacklisted in the build. + * We use tests.rest.blacklist in build files to denylist tests; this property enables a user to add additional denylisted tests on + * top of the tests denylisted in the build. */ public static final String REST_TESTS_BLACKLIST_ADDITIONS = "tests.rest.blacklist_additions"; /** @@ -116,7 +116,7 @@ public abstract class OpenSearchClientYamlSuiteTestCase extends OpenSearchRestTe */ private static final String PATHS_SEPARATOR = "(? 
blacklistPathMatchers; + private static List denylistPathMatchers; private static ClientYamlTestExecutionContext restTestExecutionContext; private static ClientYamlTestExecutionContext adminExecutionContext; private static ClientYamlTestClient clientYamlTestClient; @@ -138,7 +138,7 @@ public static void initializeUseDefaultNumberOfShards() { public void initAndResetContext() throws Exception { if (restTestExecutionContext == null) { assert adminExecutionContext == null; - assert blacklistPathMatchers == null; + assert denylistPathMatchers == null; final ClientYamlSuiteRestSpec restSpec = ClientYamlSuiteRestSpec.load(SPEC_PATH); validateSpec(restSpec); final List hosts = getClusterHosts(); @@ -149,21 +149,21 @@ public void initAndResetContext() throws Exception { clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, esVersion, masterVersion); restTestExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, randomizeContentType()); adminExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, false); - final String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); - blacklistPathMatchers = new ArrayList<>(); - for (final String entry : blacklist) { - blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); + final String[] denylist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); + denylistPathMatchers = new ArrayList<>(); + for (final String entry : denylist) { + denylistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); } - final String[] blacklistAdditions = resolvePathsProperty(REST_TESTS_BLACKLIST_ADDITIONS, null); - for (final String entry : blacklistAdditions) { - blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); + final String[] denylistAdditions = resolvePathsProperty(REST_TESTS_BLACKLIST_ADDITIONS, null); + for (final String entry : denylistAdditions) { + denylistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); } } assert restTestExecutionContext != null; assert adminExecutionContext != null; - assert blacklistPathMatchers != null; + assert denylistPathMatchers != null; - // admin context must be available for @After always, regardless of whether the test was blacklisted + // admin context must be available for @After always, regardless of whether the test was denylisted adminExecutionContext.clear(); restTestExecutionContext.clear(); @@ -184,7 +184,7 @@ public static void closeClient() throws IOException { try { IOUtils.close(clientYamlTestClient); } finally { - blacklistPathMatchers = null; + denylistPathMatchers = null; restTestExecutionContext = null; adminExecutionContext = null; clientYamlTestClient = null; @@ -355,12 +355,12 @@ protected RequestOptions getCatNodesVersionMasterRequestOptions() { } public void test() throws IOException { - // skip test if it matches one of the blacklist globs - for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) { + // skip test if it matches one of the denylist globs + for (BlacklistedPathPatternMatcher denylistedPathMatcher : denylistPathMatchers) { String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName(); assumeFalse( "[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", - blacklistedPathMatcher.isSuffixMatch(testPath) + denylistedPathMatcher.isSuffixMatch(testPath) ); }
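The denylist globs configured through `tests.rest.blacklist` and `tests.rest.blacklist_additions` are matched as suffix patterns against the full test path (suite path plus test section name), with `*` expanding only within a single path segment, as the `BlacklistedPathPatternMatcher` javadoc above describes. The sketch below illustrates that matching behavior under those assumptions; the class name `DenylistGlobSketch` is hypothetical, and the wildcard-to-regex translation (`*` becomes `[^/]*`, anchored as a suffix) mirrors the translation shown in the diff rather than copying the exact implementation.

```
import java.util.regex.Pattern;

// Hypothetical illustration of suffix-matching a denylist glob against a REST test path.
// Assumption: '*' matches within a single path segment only, and the glob must match a
// suffix of the path, as described in the BlacklistedPathPatternMatcher javadoc above.
public final class DenylistGlobSketch {

    private final Pattern pattern;

    DenylistGlobSketch(String glob) {
        if (glob == null || glob.trim().isEmpty()) {
            // mirrors the guard in the diff: an empty glob would otherwise match everything
            throw new IllegalArgumentException("Empty denylist patterns are not supported");
        }
        // '*' may not cross a '/' boundary; anchor the translated glob as a suffix match
        String regex = glob.replace("*", "[^/]*");
        this.pattern = Pattern.compile(".*" + regex + "$");
    }

    boolean isSuffixMatch(String testPath) {
        return pattern.matcher(testPath).matches();
    }

    public static void main(String[] args) {
        DenylistGlobSketch matcher = new DenylistGlobSketch("get/10_basic/*");
        // true: the glob matches the trailing segments of the path
        System.out.println(matcher.isSuffixMatch("rest-api-spec/test/get/10_basic/Basic"));
        // false: '*' does not span the extra "sub" segment
        System.out.println(matcher.isSuffixMatch("rest-api-spec/test/get/10_basic/sub/Basic"));
    }
}
```

Used this way, a test is skipped (via `assumeFalse`) whenever any configured glob is a suffix match for its path, which is how the renamed `denylistPathMatchers` list is consulted in `test()` above.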