listener
- ) {
- return restHighLevelClient.performRequestAsyncAndParseEntity(
- syncedFlushRequest,
- IndicesRequestConverters::flushSynced,
- options,
- SyncedFlushResponse::fromXContent,
- listener,
- emptySet()
- );
- }
-
/**
* Retrieve the settings of one or more indices.
*
@@ -1915,7 +1867,6 @@ public Cancellable simulateIndexTemplateAsync(
/**
* Validate a potentially expensive query without executing it.
- *
*
* @param validateQueryRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@@ -1934,7 +1885,6 @@ public ValidateQueryResponse validateQuery(ValidateQueryRequest validateQueryReq
/**
* Asynchronously validate a potentially expensive query without executing it.
- *
*
* @param validateQueryRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java
index 727e91fc210cd..9979d18635d05 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java
@@ -42,7 +42,6 @@
import org.opensearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
import org.opensearch.action.admin.indices.flush.FlushRequest;
-import org.opensearch.action.admin.indices.flush.SyncedFlushRequest;
import org.opensearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.opensearch.action.admin.indices.open.OpenIndexRequest;
import org.opensearch.action.admin.indices.refresh.RefreshRequest;
@@ -322,15 +321,6 @@ static Request flush(FlushRequest flushRequest) {
return request;
}
- static Request flushSynced(SyncedFlushRequest syncedFlushRequest) {
- String[] indices = syncedFlushRequest.indices() == null ? Strings.EMPTY_ARRAY : syncedFlushRequest.indices();
- Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush/synced"));
- RequestConverters.Params parameters = new RequestConverters.Params();
- parameters.withIndicesOptions(syncedFlushRequest.indicesOptions());
- request.addParameters(parameters.asMap());
- return request;
- }
-
static Request forceMerge(ForceMergeRequest forceMergeRequest) {
String[] indices = forceMergeRequest.indices() == null ? Strings.EMPTY_ARRAY : forceMergeRequest.indices();
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_forcemerge"));
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/IngestClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/IngestClient.java
index 93dd3513a4614..cd304019e771c 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/IngestClient.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/IngestClient.java
@@ -175,7 +175,6 @@ public Cancellable deletePipelineAsync(
/**
* Simulate a pipeline on a set of documents provided in the request
- *
*
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@@ -194,7 +193,6 @@ public SimulatePipelineResponse simulate(SimulatePipelineRequest request, Reques
/**
* Asynchronously simulate a pipeline on a set of documents provided in the request
- *
*
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java
index f0f33ae1e71fe..3e43963db519f 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java
@@ -117,7 +117,7 @@ private RequestConverters() {
}
static Request delete(DeleteRequest deleteRequest) {
- String endpoint = endpoint(deleteRequest.index(), deleteRequest.type(), deleteRequest.id());
+ String endpoint = endpoint(deleteRequest.index(), deleteRequest.id());
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
Params parameters = new Params();
@@ -185,11 +185,6 @@ static Request bulk(BulkRequest bulkRequest) throws IOException {
if (Strings.hasLength(action.index())) {
metadata.field("_index", action.index());
}
- if (Strings.hasLength(action.type())) {
- if (MapperService.SINGLE_MAPPING_NAME.equals(action.type()) == false) {
- metadata.field("_type", action.type());
- }
- }
if (Strings.hasLength(action.id())) {
metadata.field("_id", action.id());
}
@@ -284,7 +279,7 @@ static Request get(GetRequest getRequest) {
}
private static Request getStyleRequest(String method, GetRequest getRequest) {
- Request request = new Request(method, endpoint(getRequest.index(), getRequest.type(), getRequest.id()));
+ Request request = new Request(method, endpoint(getRequest.index(), getRequest.id()));
Params parameters = new Params();
parameters.withPreference(getRequest.preference());
@@ -315,13 +310,7 @@ private static Request sourceRequest(GetSourceRequest getSourceRequest, String h
parameters.withRealtime(getSourceRequest.realtime());
parameters.withFetchSourceContext(getSourceRequest.fetchSourceContext());
- String optionalType = getSourceRequest.type();
- String endpoint;
- if (optionalType == null) {
- endpoint = endpoint(getSourceRequest.index(), "_source", getSourceRequest.id());
- } else {
- endpoint = endpoint(getSourceRequest.index(), optionalType, getSourceRequest.id(), "_source");
- }
+ String endpoint = endpoint(getSourceRequest.index(), "_source", getSourceRequest.id());
Request request = new Request(httpMethodName, endpoint);
request.addParameters(parameters.asMap());
return request;
@@ -344,11 +333,9 @@ static Request index(IndexRequest indexRequest) {
String endpoint;
if (indexRequest.opType() == DocWriteRequest.OpType.CREATE) {
- endpoint = indexRequest.type().equals(MapperService.SINGLE_MAPPING_NAME)
- ? endpoint(indexRequest.index(), "_create", indexRequest.id())
- : endpoint(indexRequest.index(), indexRequest.type(), indexRequest.id(), "_create");
+ endpoint = endpoint(indexRequest.index(), "_create", indexRequest.id());
} else {
- endpoint = endpoint(indexRequest.index(), indexRequest.type(), indexRequest.id());
+ endpoint = endpoint(indexRequest.index(), indexRequest.id());
}
Request request = new Request(method, endpoint);
@@ -377,9 +364,7 @@ static Request ping() {
}
static Request update(UpdateRequest updateRequest) throws IOException {
- String endpoint = updateRequest.type().equals(MapperService.SINGLE_MAPPING_NAME)
- ? endpoint(updateRequest.index(), "_update", updateRequest.id())
- : endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update");
+ String endpoint = endpoint(updateRequest.index(), "_update", updateRequest.id());
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params parameters = new Params();
@@ -432,7 +417,7 @@ static Request update(UpdateRequest updateRequest) throws IOException {
* for standard searches
*/
static Request search(SearchRequest searchRequest, String searchEndpoint) throws IOException {
- Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchRequest.types(), searchEndpoint));
+ Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchEndpoint));
Params params = new Params();
addSearchRequestParams(params, searchRequest);
@@ -502,7 +487,7 @@ static Request searchTemplate(SearchTemplateRequest searchTemplateRequest) throw
request = new Request(HttpGet.METHOD_NAME, "_render/template");
} else {
SearchRequest searchRequest = searchTemplateRequest.getRequest();
- String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search/template");
+ String endpoint = endpoint(searchRequest.indices(), "_search/template");
request = new Request(HttpGet.METHOD_NAME, endpoint);
Params params = new Params();
@@ -548,9 +533,7 @@ static Request count(CountRequest countRequest) throws IOException {
}
static Request explain(ExplainRequest explainRequest) throws IOException {
- String endpoint = explainRequest.type().equals(MapperService.SINGLE_MAPPING_NAME)
- ? endpoint(explainRequest.index(), "_explain", explainRequest.id())
- : endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain");
+ String endpoint = endpoint(explainRequest.index(), "_explain", explainRequest.id());
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
Params params = new Params();
@@ -633,7 +616,7 @@ private static Request prepareReindexRequest(ReindexRequest reindexRequest, bool
private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteByQueryRequest, boolean waitForCompletion)
throws IOException {
- String endpoint = endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
+ String endpoint = endpoint(deleteByQueryRequest.indices(), "_delete_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params().withRouting(deleteByQueryRequest.getRouting())
.withRefresh(deleteByQueryRequest.isRefresh())
@@ -661,7 +644,7 @@ private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteBy
}
static Request prepareUpdateByQueryRequest(UpdateByQueryRequest updateByQueryRequest, boolean waitForCompletion) throws IOException {
- String endpoint = endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query");
+ String endpoint = endpoint(updateByQueryRequest.indices(), "_update_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params().withRouting(updateByQueryRequest.getRouting())
.withPipeline(updateByQueryRequest.getPipeline())
@@ -799,10 +782,16 @@ static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType,
return new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
}
+ static String endpoint(String index, String id) {
+ return new EndpointBuilder().addPathPart(index, MapperService.SINGLE_MAPPING_NAME, id).build();
+ }
+
+ @Deprecated
static String endpoint(String index, String type, String id) {
return new EndpointBuilder().addPathPart(index, type, id).build();
}
+ @Deprecated
static String endpoint(String index, String type, String id, String endpoint) {
return new EndpointBuilder().addPathPart(index, type, id).addPathPartAsIs(endpoint).build();
}
@@ -815,6 +804,7 @@ static String endpoint(String[] indices, String endpoint) {
return new EndpointBuilder().addCommaSeparatedPathParts(indices).addPathPartAsIs(endpoint).build();
}
+ @Deprecated
static String endpoint(String[] indices, String[] types, String endpoint) {
return new EndpointBuilder().addCommaSeparatedPathParts(indices)
.addCommaSeparatedPathParts(types)
@@ -829,6 +819,7 @@ static String endpoint(String[] indices, String endpoint, String[] suffixes) {
.build();
}
+ @Deprecated
static String endpoint(String[] indices, String endpoint, String type) {
return new EndpointBuilder().addCommaSeparatedPathParts(indices).addPathPartAsIs(endpoint).addPathPart(type).build();
}
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotClient.java
index c702fcda89e5d..85a793dec24ce 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotClient.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotClient.java
@@ -293,7 +293,6 @@ public CreateSnapshotResponse create(CreateSnapshotRequest createSnapshotRequest
/**
* Asynchronously creates a snapshot.
- *
*
* @return cancellable that may be used to cancel the request
*/
@@ -327,7 +326,6 @@ public AcknowledgedResponse clone(CloneSnapshotRequest cloneSnapshotRequest, Req
/**
* Asynchronously clones a snapshot.
- *
*
* @return cancellable that may be used to cancel the request
*/
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/SyncedFlushResponse.java b/client/rest-high-level/src/main/java/org/opensearch/client/SyncedFlushResponse.java
deleted file mode 100644
index a0c94fb75579e..0000000000000
--- a/client/rest-high-level/src/main/java/org/opensearch/client/SyncedFlushResponse.java
+++ /dev/null
@@ -1,346 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.client;
-
-import org.opensearch.common.ParseField;
-import org.opensearch.common.ParsingException;
-import org.opensearch.common.xcontent.ConstructingObjectParser;
-import org.opensearch.common.xcontent.ToXContentFragment;
-import org.opensearch.common.xcontent.ToXContentObject;
-import org.opensearch.common.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentLocation;
-import org.opensearch.common.xcontent.XContentParser;
-import org.opensearch.common.xcontent.XContentParser.Token;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.opensearch.common.xcontent.ConstructingObjectParser.constructorArg;
-import static org.opensearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
-import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
-
-public class SyncedFlushResponse implements ToXContentObject {
-
- public static final String SHARDS_FIELD = "_shards";
-
- private ShardCounts totalCounts;
- private Map<String, IndexResult> indexResults;
-
- SyncedFlushResponse(ShardCounts totalCounts, Map<String, IndexResult> indexResults) {
- this.totalCounts = new ShardCounts(totalCounts.total, totalCounts.successful, totalCounts.failed);
- this.indexResults = Collections.unmodifiableMap(indexResults);
- }
-
- /**
- * @return The total number of shard copies that were processed across all indexes
- */
- public int totalShards() {
- return totalCounts.total;
- }
-
- /**
- * @return The number of successful shard copies that were processed across all indexes
- */
- public int successfulShards() {
- return totalCounts.successful;
- }
-
- /**
- * @return The number of failed shard copies that were processed across all indexes
- */
- public int failedShards() {
- return totalCounts.failed;
- }
-
- /**
- * @return A map of results for each index where the keys of the map are the index names
- * and the values are the results encapsulated in {@link IndexResult}.
- */
- public Map<String, IndexResult> getIndexResults() {
- return indexResults;
- }
-
- ShardCounts getShardCounts() {
- return totalCounts;
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject();
- builder.startObject(SHARDS_FIELD);
- totalCounts.toXContent(builder, params);
- builder.endObject();
- for (Map.Entry<String, IndexResult> entry : indexResults.entrySet()) {
- String indexName = entry.getKey();
- IndexResult indexResult = entry.getValue();
- builder.startObject(indexName);
- indexResult.toXContent(builder, params);
- builder.endObject();
- }
- builder.endObject();
- return builder;
- }
-
- public static SyncedFlushResponse fromXContent(XContentParser parser) throws IOException {
- ensureExpectedToken(Token.START_OBJECT, parser.nextToken(), parser);
- ShardCounts totalCounts = null;
- Map<String, IndexResult> indexResults = new HashMap<>();
- XContentLocation startLoc = parser.getTokenLocation();
- while (parser.nextToken().equals(Token.FIELD_NAME)) {
- if (parser.currentName().equals(SHARDS_FIELD)) {
- ensureExpectedToken(Token.START_OBJECT, parser.nextToken(), parser);
- totalCounts = ShardCounts.fromXContent(parser);
- } else {
- String indexName = parser.currentName();
- IndexResult indexResult = IndexResult.fromXContent(parser);
- indexResults.put(indexName, indexResult);
- }
- }
- if (totalCounts != null) {
- return new SyncedFlushResponse(totalCounts, indexResults);
- } else {
- throw new ParsingException(startLoc, "Unable to reconstruct object. Total counts for shards couldn't be parsed.");
- }
- }
-
- /**
- * Encapsulates the number of total successful and failed shard copies
- */
- public static final class ShardCounts implements ToXContentFragment {
-
- public static final String TOTAL_FIELD = "total";
- public static final String SUCCESSFUL_FIELD = "successful";
- public static final String FAILED_FIELD = "failed";
-
- private static final ConstructingObjectParser<ShardCounts, Void> PARSER = new ConstructingObjectParser<>(
- "shardcounts",
- a -> new ShardCounts((Integer) a[0], (Integer) a[1], (Integer) a[2])
- );
- static {
- PARSER.declareInt(constructorArg(), new ParseField(TOTAL_FIELD));
- PARSER.declareInt(constructorArg(), new ParseField(SUCCESSFUL_FIELD));
- PARSER.declareInt(constructorArg(), new ParseField(FAILED_FIELD));
- }
-
- private int total;
- private int successful;
- private int failed;
-
- ShardCounts(int total, int successful, int failed) {
- this.total = total;
- this.successful = successful;
- this.failed = failed;
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.field(TOTAL_FIELD, total);
- builder.field(SUCCESSFUL_FIELD, successful);
- builder.field(FAILED_FIELD, failed);
- return builder;
- }
-
- public static ShardCounts fromXContent(XContentParser parser) throws IOException {
- return PARSER.parse(parser, null);
- }
-
- public boolean equals(ShardCounts other) {
- if (other != null) {
- return other.total == this.total && other.successful == this.successful && other.failed == this.failed;
- } else {
- return false;
- }
- }
-
- }
-
- /**
- * Description for the flush/synced results for a particular index.
- * This includes total, successful and failed copies along with failure description for each failed copy.
- */
- public static final class IndexResult implements ToXContentFragment {
-
- public static final String TOTAL_FIELD = "total";
- public static final String SUCCESSFUL_FIELD = "successful";
- public static final String FAILED_FIELD = "failed";
- public static final String FAILURES_FIELD = "failures";
-
- @SuppressWarnings("unchecked")
- private static final ConstructingObjectParser<IndexResult, Void> PARSER = new ConstructingObjectParser<>(
- "indexresult",
- a -> new IndexResult((Integer) a[0], (Integer) a[1], (Integer) a[2], (List<ShardFailure>) a[3])
- );
- static {
- PARSER.declareInt(constructorArg(), new ParseField(TOTAL_FIELD));
- PARSER.declareInt(constructorArg(), new ParseField(SUCCESSFUL_FIELD));
- PARSER.declareInt(constructorArg(), new ParseField(FAILED_FIELD));
- PARSER.declareObjectArray(optionalConstructorArg(), ShardFailure.PARSER, new ParseField(FAILURES_FIELD));
- }
-
- private ShardCounts counts;
- private List<ShardFailure> failures;
-
- IndexResult(int total, int successful, int failed, List<ShardFailure> failures) {
- counts = new ShardCounts(total, successful, failed);
- if (failures != null) {
- this.failures = Collections.unmodifiableList(failures);
- } else {
- this.failures = Collections.unmodifiableList(new ArrayList<>());
- }
- }
-
- /**
- * @return The total number of shard copies that were processed for this index.
- */
- public int totalShards() {
- return counts.total;
- }
-
- /**
- * @return The number of successful shard copies that were processed for this index.
- */
- public int successfulShards() {
- return counts.successful;
- }
-
- /**
- * @return The number of failed shard copies that were processed for this index.
- */
- public int failedShards() {
- return counts.failed;
- }
-
- /**
- * @return A list of {@link ShardFailure} objects that describe each of the failed shard copies for this index.
- */
- public List<ShardFailure> failures() {
- return failures;
- }
-
- ShardCounts getShardCounts() {
- return counts;
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- counts.toXContent(builder, params);
- if (failures.size() > 0) {
- builder.startArray(FAILURES_FIELD);
- for (ShardFailure failure : failures) {
- failure.toXContent(builder, params);
- }
- builder.endArray();
- }
- return builder;
- }
-
- public static IndexResult fromXContent(XContentParser parser) throws IOException {
- return PARSER.parse(parser, null);
- }
- }
-
- /**
- * Description of a failed shard copy for an index.
- */
- public static final class ShardFailure implements ToXContentFragment {
-
- public static String SHARD_ID_FIELD = "shard";
- public static String FAILURE_REASON_FIELD = "reason";
- public static String ROUTING_FIELD = "routing";
-
- private int shardId;
- private String failureReason;
- private Map<String, Object> routing;
-
- @SuppressWarnings("unchecked")
- static final ConstructingObjectParser<ShardFailure, Void> PARSER = new ConstructingObjectParser<>(
- "shardfailure",
- a -> new ShardFailure((Integer) a[0], (String) a[1], (Map<String, Object>) a[2])
- );
- static {
- PARSER.declareInt(constructorArg(), new ParseField(SHARD_ID_FIELD));
- PARSER.declareString(constructorArg(), new ParseField(FAILURE_REASON_FIELD));
- PARSER.declareObject(optionalConstructorArg(), (parser, c) -> parser.map(), new ParseField(ROUTING_FIELD));
- }
-
- ShardFailure(int shardId, String failureReason, Map<String, Object> routing) {
- this.shardId = shardId;
- this.failureReason = failureReason;
- if (routing != null) {
- this.routing = Collections.unmodifiableMap(routing);
- } else {
- this.routing = Collections.unmodifiableMap(new HashMap<>());
- }
- }
-
- /**
- * @return Id of the shard whose copy failed
- */
- public int getShardId() {
- return shardId;
- }
-
- /**
- * @return Reason for failure of the shard copy
- */
- public String getFailureReason() {
- return failureReason;
- }
-
- /**
- * @return Additional information about the failure.
- */
- public Map<String, Object> getRouting() {
- return routing;
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject();
- builder.field(SHARD_ID_FIELD, shardId);
- builder.field(FAILURE_REASON_FIELD, failureReason);
- if (routing.size() > 0) {
- builder.field(ROUTING_FIELD, routing);
- }
- builder.endObject();
- return builder;
- }
-
- public static ShardFailure fromXContent(XContentParser parser) throws IOException {
- return PARSER.parse(parser, null);
- }
- }
-}
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/core/TermVectorsResponse.java b/client/rest-high-level/src/main/java/org/opensearch/client/core/TermVectorsResponse.java
index 757e0df6aee77..fa13abf72207e 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/core/TermVectorsResponse.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/core/TermVectorsResponse.java
@@ -46,24 +46,14 @@
public class TermVectorsResponse {
private final String index;
- private final String type;
private final String id;
private final long docVersion;
private final boolean found;
private final long tookInMillis;
private final List<TermVector> termVectorList;
- public TermVectorsResponse(
- String index,
- String type,
- String id,
- long version,
- boolean found,
- long tookInMillis,
- List<TermVector> termVectorList
- ) {
+ public TermVectorsResponse(String index, String id, long version, boolean found, long tookInMillis, List<TermVector> termVectorList) {
this.index = index;
- this.type = type;
this.id = id;
this.docVersion = version;
this.found = found;
@@ -75,19 +65,18 @@ public TermVectorsResponse(
"term_vectors",
true,
args -> {
- // as the response comes from server, we are sure that args[6] will be a list of TermVector
+ // as the response comes from server, we are sure that args[5] will be a list of TermVector
@SuppressWarnings("unchecked")
- List<TermVector> termVectorList = (List<TermVector>) args[6];
+ List<TermVector> termVectorList = (List<TermVector>) args[5];
if (termVectorList != null) {
Collections.sort(termVectorList, Comparator.comparing(TermVector::getFieldName));
}
return new TermVectorsResponse(
(String) args[0],
(String) args[1],
- (String) args[2],
- (long) args[3],
- (boolean) args[4],
- (long) args[5],
+ (long) args[2],
+ (boolean) args[3],
+ (long) args[4],
termVectorList
);
}
@@ -95,7 +84,6 @@ public TermVectorsResponse(
static {
PARSER.declareString(constructorArg(), new ParseField("_index"));
- PARSER.declareString(constructorArg(), new ParseField("_type"));
PARSER.declareString(optionalConstructorArg(), new ParseField("_id"));
PARSER.declareLong(constructorArg(), new ParseField("_version"));
PARSER.declareBoolean(constructorArg(), new ParseField("found"));
@@ -118,16 +106,6 @@ public String getIndex() {
return index;
}
- /**
- * Returns the type for the response
- *
- * @deprecated Types are in the process of being removed.
- */
- @Deprecated
- public String getType() {
- return type;
- }
-
/**
* Returns the id of the request
* can be NULL if there is no document ID
@@ -171,7 +149,6 @@ public boolean equals(Object obj) {
if (!(obj instanceof TermVectorsResponse)) return false;
TermVectorsResponse other = (TermVectorsResponse) obj;
return index.equals(other.index)
- && type.equals(other.type)
&& Objects.equals(id, other.id)
&& docVersion == other.docVersion
&& found == other.found
@@ -181,7 +158,7 @@ public boolean equals(Object obj) {
@Override
public int hashCode() {
- return Objects.hash(index, type, id, docVersion, found, tookInMillis, termVectorList);
+ return Objects.hash(index, id, docVersion, found, tookInMillis, termVectorList);
}
public static final class TermVector {
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java
index cae1298a8793d..cc9abdccf4c9f 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java
@@ -49,11 +49,8 @@
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.index.mapper.MapperService;
-import org.opensearch.rest.action.document.RestBulkAction;
import org.opensearch.search.SearchHit;
import org.hamcrest.Matcher;
-import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.Arrays;
@@ -69,9 +66,7 @@
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.fieldFromSource;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasId;
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasIndex;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasProperty;
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasType;
import static org.hamcrest.Matchers.both;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.either;
@@ -96,17 +91,6 @@ private static BulkProcessor.Builder initBulkProcessorBuilder(BulkProcessor.List
);
}
- private static BulkProcessor.Builder initBulkProcessorBuilderUsingTypes(BulkProcessor.Listener listener) {
- return BulkProcessor.builder(
- (request, bulkListener) -> highLevelClient().bulkAsync(
- request,
- expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE),
- bulkListener
- ),
- listener
- );
- }
-
public void testThatBulkProcessorCountIsCorrect() throws Exception {
final CountDownLatch latch = new CountDownLatch(1);
BulkProcessorTestListener listener = new BulkProcessorTestListener(latch);
@@ -210,7 +194,6 @@ public void testBulkProcessorConcurrentRequests() throws Exception {
for (BulkItemResponse bulkItemResponse : listener.bulkItems) {
assertThat(bulkItemResponse.getFailureMessage(), bulkItemResponse.isFailed(), equalTo(false));
assertThat(bulkItemResponse.getIndex(), equalTo("test"));
- assertThat(bulkItemResponse.getType(), equalTo("_doc"));
// with concurrent requests > 1 we can't rely on the order of the bulk requests
assertThat(Integer.valueOf(bulkItemResponse.getId()), both(greaterThan(0)).and(lessThanOrEqualTo(numDocs)));
// we do want to check that we don't get duplicate ids back
@@ -317,7 +300,6 @@ public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception
Set<String> readOnlyIds = new HashSet<>();
for (BulkItemResponse bulkItemResponse : listener.bulkItems) {
assertThat(bulkItemResponse.getIndex(), either(equalTo("test")).or(equalTo("test-ro")));
- assertThat(bulkItemResponse.getType(), equalTo("_doc"));
if (bulkItemResponse.getIndex().equals("test")) {
assertThat(bulkItemResponse.isFailed(), equalTo(false));
// with concurrent requests > 1 we can't rely on the order of the bulk requests
@@ -346,7 +328,6 @@ public void testGlobalParametersAndSingleRequest() throws Exception {
// tag::bulk-processor-mix-parameters
try (BulkProcessor processor = initBulkProcessorBuilder(listener)
.setGlobalIndex("tweets")
- .setGlobalType("_doc")
.setGlobalRouting("routing")
.setGlobalPipeline("pipeline_id")
.build()) {
@@ -373,85 +354,9 @@ public void testGlobalParametersAndBulkProcessor() throws Exception {
createIndexWithMultipleShards("test");
createFieldAddingPipleine("pipeline_id", "fieldNameXYZ", "valueXYZ");
- final String customType = "testType";
- final String ignoredType = "ignoredType";
int numDocs = randomIntBetween(10, 10);
{
- final CountDownLatch latch = new CountDownLatch(1);
- BulkProcessorTestListener listener = new BulkProcessorTestListener(latch);
- // Check that untyped document additions inherit the global type
- String globalType = customType;
- String localType = null;
- try (
- BulkProcessor processor = initBulkProcessorBuilderUsingTypes(listener)
- // let's make sure that the bulk action limit trips, one single execution will index all the documents
- .setConcurrentRequests(randomIntBetween(0, 1))
- .setBulkActions(numDocs)
- .setFlushInterval(TimeValue.timeValueHours(24))
- .setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
- .setGlobalIndex("test")
- .setGlobalType(globalType)
- .setGlobalRouting("routing")
- .setGlobalPipeline("pipeline_id")
- .build()
- ) {
-
- indexDocs(processor, numDocs, null, localType, "test", globalType, "pipeline_id");
- latch.await();
-
- assertThat(listener.beforeCounts.get(), equalTo(1));
- assertThat(listener.afterCounts.get(), equalTo(1));
- assertThat(listener.bulkFailures.size(), equalTo(0));
- assertResponseItems(listener.bulkItems, numDocs, globalType);
-
- Iterable hits = searchAll(new SearchRequest("test").routing("routing"));
-
- assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ"))));
- assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(globalType))));
- assertThat(hits, containsInAnyOrder(expectedIds(numDocs)));
- }
-
- }
- {
- // Check that typed document additions don't inherit the global type
- String globalType = ignoredType;
- String localType = customType;
- final CountDownLatch latch = new CountDownLatch(1);
- BulkProcessorTestListener listener = new BulkProcessorTestListener(latch);
- try (
- BulkProcessor processor = initBulkProcessorBuilderUsingTypes(listener)
- // let's make sure that the bulk action limit trips, one single execution will index all the documents
- .setConcurrentRequests(randomIntBetween(0, 1))
- .setBulkActions(numDocs)
- .setFlushInterval(TimeValue.timeValueHours(24))
- .setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
- .setGlobalIndex("test")
- .setGlobalType(globalType)
- .setGlobalRouting("routing")
- .setGlobalPipeline("pipeline_id")
- .build()
- ) {
- indexDocs(processor, numDocs, null, localType, "test", globalType, "pipeline_id");
- latch.await();
-
- assertThat(listener.beforeCounts.get(), equalTo(1));
- assertThat(listener.afterCounts.get(), equalTo(1));
- assertThat(listener.bulkFailures.size(), equalTo(0));
- assertResponseItems(listener.bulkItems, numDocs, localType);
-
- Iterable hits = searchAll(new SearchRequest("test").routing("routing"));
-
- assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ"))));
- assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(localType))));
- assertThat(hits, containsInAnyOrder(expectedIds(numDocs)));
- }
- }
- {
- // Check that untyped document additions and untyped global inherit the established custom type
- // (the custom document type introduced to the mapping by the earlier code in this test)
- String globalType = null;
- String localType = null;
final CountDownLatch latch = new CountDownLatch(1);
BulkProcessorTestListener listener = new BulkProcessorTestListener(latch);
try (
@@ -462,23 +367,22 @@ public void testGlobalParametersAndBulkProcessor() throws Exception {
.setFlushInterval(TimeValue.timeValueHours(24))
.setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
.setGlobalIndex("test")
- .setGlobalType(globalType)
.setGlobalRouting("routing")
.setGlobalPipeline("pipeline_id")
.build()
) {
- indexDocs(processor, numDocs, null, localType, "test", globalType, "pipeline_id");
+
+ indexDocs(processor, numDocs, null, "test", "pipeline_id");
latch.await();
assertThat(listener.beforeCounts.get(), equalTo(1));
assertThat(listener.afterCounts.get(), equalTo(1));
assertThat(listener.bulkFailures.size(), equalTo(0));
- assertResponseItems(listener.bulkItems, numDocs, MapperService.SINGLE_MAPPING_NAME);
+ assertResponseItems(listener.bulkItems, numDocs);
Iterable hits = searchAll(new SearchRequest("test").routing("routing"));
assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ"))));
- assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(customType))));
assertThat(hits, containsInAnyOrder(expectedIds(numDocs)));
}
}
@@ -489,45 +393,31 @@ private Matcher<SearchHit>[] expectedIds(int numDocs) {
return IntStream.rangeClosed(1, numDocs).boxed().map(n -> hasId(n.toString())).<Matcher<SearchHit>>toArray(Matcher[]::new);
}
- private MultiGetRequest indexDocs(
- BulkProcessor processor,
- int numDocs,
- String localIndex,
- String localType,
- String globalIndex,
- String globalType,
- String globalPipeline
- ) throws Exception {
+ private MultiGetRequest indexDocs(BulkProcessor processor, int numDocs, String localIndex, String globalIndex, String globalPipeline)
+ throws Exception {
MultiGetRequest multiGetRequest = new MultiGetRequest();
for (int i = 1; i <= numDocs; i++) {
if (randomBoolean()) {
processor.add(
- new IndexRequest(localIndex, localType, Integer.toString(i)).source(
- XContentType.JSON,
- "field",
- randomRealisticUnicodeOfLengthBetween(1, 30)
- )
+ new IndexRequest(localIndex).id(Integer.toString(i))
+ .source(XContentType.JSON, "field", randomRealisticUnicodeOfLengthBetween(1, 30))
);
} else {
- BytesArray data = bytesBulkRequest(localIndex, localType, i);
- processor.add(data, globalIndex, globalType, globalPipeline, XContentType.JSON);
+ BytesArray data = bytesBulkRequest(localIndex, i);
+ processor.add(data, globalIndex, globalPipeline, XContentType.JSON);
}
multiGetRequest.add(localIndex, Integer.toString(i));
}
return multiGetRequest;
}
- private static BytesArray bytesBulkRequest(String localIndex, String localType, int id) throws IOException {
+ private static BytesArray bytesBulkRequest(String localIndex, int id) throws IOException {
XContentBuilder action = jsonBuilder().startObject().startObject("index");
if (localIndex != null) {
action.field("_index", localIndex);
}
- if (localType != null) {
- action.field("_type", localType);
- }
-
action.field("_id", Integer.toString(id));
action.endObject().endObject();
@@ -538,19 +428,14 @@ private static BytesArray bytesBulkRequest(String localIndex, String localType,
}
private MultiGetRequest indexDocs(BulkProcessor processor, int numDocs) throws Exception {
- return indexDocs(processor, numDocs, "test", null, null, null, null);
+ return indexDocs(processor, numDocs, "test", null, null);
}
private static void assertResponseItems(List<BulkItemResponse> bulkItemResponses, int numDocs) {
- assertResponseItems(bulkItemResponses, numDocs, MapperService.SINGLE_MAPPING_NAME);
- }
-
- private static void assertResponseItems(List<BulkItemResponse> bulkItemResponses, int numDocs, String expectedType) {
assertThat(bulkItemResponses.size(), is(numDocs));
int i = 1;
for (BulkItemResponse bulkItemResponse : bulkItemResponses) {
assertThat(bulkItemResponse.getIndex(), equalTo("test"));
- assertThat(bulkItemResponse.getType(), equalTo(expectedType));
assertThat(bulkItemResponse.getId(), equalTo(Integer.toString(i++)));
assertThat(
"item " + i + " failed with cause: " + bulkItemResponse.getFailureMessage(),
@@ -565,7 +450,6 @@ private static void assertMultiGetResponse(MultiGetResponse multiGetResponse, in
int i = 1;
for (MultiGetItemResponse multiGetItemResponse : multiGetResponse) {
assertThat(multiGetItemResponse.getIndex(), equalTo("test"));
- assertThat(multiGetItemResponse.getType(), equalTo("_doc"));
assertThat(multiGetItemResponse.getId(), equalTo(Integer.toString(i++)));
}
}
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/BulkRequestWithGlobalParametersIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/BulkRequestWithGlobalParametersIT.java
index d42cb7abe2c4c..35fc9d88e316c 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/BulkRequestWithGlobalParametersIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/BulkRequestWithGlobalParametersIT.java
@@ -37,7 +37,6 @@
import org.opensearch.action.index.IndexRequest;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.rest.action.document.RestBulkAction;
import org.opensearch.search.SearchHit;
import java.io.IOException;
@@ -46,7 +45,6 @@
import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasId;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasIndex;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasProperty;
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasType;
import static org.hamcrest.Matchers.both;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.emptyIterable;
@@ -117,7 +115,7 @@ public void testMixPipelineOnRequestAndGlobal() throws IOException {
}
public void testGlobalIndex() throws IOException {
- BulkRequest request = new BulkRequest("global_index", null);
+ BulkRequest request = new BulkRequest("global_index");
request.add(new IndexRequest().id("1").source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest().id("2").source(XContentType.JSON, "field", "bulk2"));
@@ -129,7 +127,7 @@ public void testGlobalIndex() throws IOException {
@SuppressWarnings("unchecked")
public void testIndexGlobalAndPerRequest() throws IOException {
- BulkRequest request = new BulkRequest("global_index", null);
+ BulkRequest request = new BulkRequest("global_index");
request.add(new IndexRequest("local_index").id("1").source(XContentType.JSON, "field", "bulk1"));
request.add(
new IndexRequest().id("2") // will take global index
@@ -142,31 +140,6 @@ public void testIndexGlobalAndPerRequest() throws IOException {
assertThat(hits, containsInAnyOrder(both(hasId("1")).and(hasIndex("local_index")), both(hasId("2")).and(hasIndex("global_index"))));
}
- public void testGlobalType() throws IOException {
- BulkRequest request = new BulkRequest(null, "global_type");
- request.add(new IndexRequest("index").id("1").source(XContentType.JSON, "field", "bulk1"));
- request.add(new IndexRequest("index").id("2").source(XContentType.JSON, "field", "bulk2"));
-
- bulkWithTypes(request);
-
- Iterable hits = searchAll("index");
- assertThat(hits, everyItem(hasType("global_type")));
- }
-
- public void testTypeGlobalAndPerRequest() throws IOException {
- BulkRequest request = new BulkRequest(null, "global_type");
- request.add(new IndexRequest("index1", "local_type", "1").source(XContentType.JSON, "field", "bulk1"));
- request.add(
- new IndexRequest("index2").id("2") // will take global type
- .source(XContentType.JSON, "field", "bulk2")
- );
-
- bulkWithTypes(request);
-
- Iterable hits = searchAll("index1", "index2");
- assertThat(hits, containsInAnyOrder(both(hasId("1")).and(hasType("local_type")), both(hasId("2")).and(hasType("global_type"))));
- }
-
public void testGlobalRouting() throws IOException {
createIndexWithMultipleShards("index");
BulkRequest request = new BulkRequest((String) null);
@@ -194,28 +167,6 @@ public void testMixLocalAndGlobalRouting() throws IOException {
assertThat(hits, containsInAnyOrder(hasId("1"), hasId("2")));
}
- public void testGlobalIndexNoTypes() throws IOException {
- BulkRequest request = new BulkRequest("global_index");
- request.add(new IndexRequest().id("1").source(XContentType.JSON, "field", "bulk1"));
- request.add(new IndexRequest().id("2").source(XContentType.JSON, "field", "bulk2"));
-
- bulk(request);
-
- Iterable hits = searchAll("global_index");
- assertThat(hits, everyItem(hasIndex("global_index")));
- }
-
- private BulkResponse bulkWithTypes(BulkRequest request) throws IOException {
- BulkResponse bulkResponse = execute(
- request,
- highLevelClient()::bulk,
- highLevelClient()::bulkAsync,
- expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE)
- );
- assertFalse(bulkResponse.hasFailures());
- return bulkResponse;
- }
-
private BulkResponse bulk(BulkRequest request) throws IOException {
BulkResponse bulkResponse = execute(request, highLevelClient()::bulk, highLevelClient()::bulkAsync, RequestOptions.DEFAULT);
assertFalse(bulkResponse.hasFailures());
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java
index 048e2060bb826..999c2a0e7643b 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java
@@ -69,12 +69,6 @@
import org.opensearch.index.VersionType;
import org.opensearch.index.get.GetResult;
import org.opensearch.rest.RestStatus;
-import org.opensearch.rest.action.document.RestBulkAction;
-import org.opensearch.rest.action.document.RestDeleteAction;
-import org.opensearch.rest.action.document.RestGetAction;
-import org.opensearch.rest.action.document.RestIndexAction;
-import org.opensearch.rest.action.document.RestMultiGetAction;
-import org.opensearch.rest.action.document.RestUpdateAction;
import org.opensearch.script.Script;
import org.opensearch.script.ScriptType;
import org.opensearch.search.fetch.subphase.FetchSourceContext;
@@ -114,7 +108,6 @@ public void testDelete() throws IOException {
}
DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
assertEquals("index", deleteResponse.getIndex());
- assertEquals("_doc", deleteResponse.getType());
assertEquals(docId, deleteResponse.getId());
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
}
@@ -124,7 +117,6 @@ public void testDelete() throws IOException {
DeleteRequest deleteRequest = new DeleteRequest("index", docId);
DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
assertEquals("index", deleteResponse.getIndex());
- assertEquals("_doc", deleteResponse.getType());
assertEquals(docId, deleteResponse.getId());
assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult());
}
@@ -163,7 +155,6 @@ public void testDelete() throws IOException {
DeleteRequest deleteRequest = new DeleteRequest("index", docId).versionType(VersionType.EXTERNAL).version(13);
DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
assertEquals("index", deleteResponse.getIndex());
- assertEquals("_doc", deleteResponse.getType());
assertEquals(docId, deleteResponse.getId());
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
}
@@ -200,37 +191,11 @@ public void testDelete() throws IOException {
DeleteRequest deleteRequest = new DeleteRequest("index", docId).routing("foo");
DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
assertEquals("index", deleteResponse.getIndex());
- assertEquals("_doc", deleteResponse.getType());
assertEquals(docId, deleteResponse.getId());
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
}
}
- public void testDeleteWithTypes() throws IOException {
- String docId = "id";
- IndexRequest indexRequest = new IndexRequest("index", "type", docId);
- indexRequest.source(Collections.singletonMap("foo", "bar"));
- execute(
- indexRequest,
- highLevelClient()::index,
- highLevelClient()::indexAsync,
- expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
- );
-
- DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId);
- DeleteResponse deleteResponse = execute(
- deleteRequest,
- highLevelClient()::delete,
- highLevelClient()::deleteAsync,
- expectWarningsOnce(RestDeleteAction.TYPES_DEPRECATION_MESSAGE)
- );
-
- assertEquals("index", deleteResponse.getIndex());
- assertEquals("type", deleteResponse.getType());
- assertEquals(docId, deleteResponse.getId());
- assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
- }
-
public void testExists() throws IOException {
{
GetRequest getRequest = new GetRequest("index", "id");
@@ -366,7 +331,6 @@ public void testGet() throws IOException {
}
GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync);
assertEquals("index", getResponse.getIndex());
- assertEquals("_doc", getResponse.getType());
assertEquals("id", getResponse.getId());
assertTrue(getResponse.isExists());
assertFalse(getResponse.isSourceEmpty());
@@ -377,7 +341,6 @@ public void testGet() throws IOException {
GetRequest getRequest = new GetRequest("index", "does_not_exist");
GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync);
assertEquals("index", getResponse.getIndex());
- assertEquals("_doc", getResponse.getType());
assertEquals("does_not_exist", getResponse.getId());
assertFalse(getResponse.isExists());
assertEquals(-1, getResponse.getVersion());
@@ -389,7 +352,6 @@ public void testGet() throws IOException {
getRequest.fetchSourceContext(new FetchSourceContext(false, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY));
GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync);
assertEquals("index", getResponse.getIndex());
- assertEquals("_doc", getResponse.getType());
assertEquals("id", getResponse.getId());
assertTrue(getResponse.isExists());
assertTrue(getResponse.isSourceEmpty());
@@ -405,7 +367,6 @@ public void testGet() throws IOException {
}
GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync);
assertEquals("index", getResponse.getIndex());
- assertEquals("_doc", getResponse.getType());
assertEquals("id", getResponse.getId());
assertTrue(getResponse.isExists());
assertFalse(getResponse.isSourceEmpty());
@@ -416,36 +377,6 @@ public void testGet() throws IOException {
}
}
- public void testGetWithTypes() throws IOException {
- String document = "{\"field\":\"value\"}";
- IndexRequest indexRequest = new IndexRequest("index", "type", "id");
- indexRequest.source(document, XContentType.JSON);
- indexRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
- execute(
- indexRequest,
- highLevelClient()::index,
- highLevelClient()::indexAsync,
- expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
- );
-
- GetRequest getRequest = new GetRequest("index", "type", "id");
- GetResponse getResponse = execute(
- getRequest,
- highLevelClient()::get,
- highLevelClient()::getAsync,
- expectWarningsOnce(RestGetAction.TYPES_DEPRECATION_MESSAGE)
- );
-
- assertEquals("index", getResponse.getIndex());
- assertEquals("type", getResponse.getType());
- assertEquals("id", getResponse.getId());
-
- assertTrue(getResponse.isExists());
- assertFalse(getResponse.isSourceEmpty());
- assertEquals(1L, getResponse.getVersion());
- assertEquals(document, getResponse.getSourceAsString());
- }
-
public void testMultiGet() throws IOException {
{
MultiGetRequest multiGetRequest = new MultiGetRequest();
@@ -457,7 +388,6 @@ public void testMultiGet() throws IOException {
assertTrue(response.getResponses()[0].isFailed());
assertNull(response.getResponses()[0].getResponse());
assertEquals("id1", response.getResponses()[0].getFailure().getId());
- assertNull(response.getResponses()[0].getFailure().getType());
assertEquals("index", response.getResponses()[0].getFailure().getIndex());
assertEquals(
"OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]",
@@ -467,7 +397,6 @@ public void testMultiGet() throws IOException {
assertTrue(response.getResponses()[1].isFailed());
assertNull(response.getResponses()[1].getResponse());
assertEquals("id2", response.getResponses()[1].getId());
- assertNull(response.getResponses()[1].getType());
assertEquals("index", response.getResponses()[1].getIndex());
assertEquals(
"OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]",
@@ -493,47 +422,26 @@ public void testMultiGet() throws IOException {
assertFalse(response.getResponses()[0].isFailed());
assertNull(response.getResponses()[0].getFailure());
assertEquals("id1", response.getResponses()[0].getId());
- assertEquals("_doc", response.getResponses()[0].getType());
assertEquals("index", response.getResponses()[0].getIndex());
assertEquals(Collections.singletonMap("field", "value1"), response.getResponses()[0].getResponse().getSource());
assertFalse(response.getResponses()[1].isFailed());
assertNull(response.getResponses()[1].getFailure());
assertEquals("id2", response.getResponses()[1].getId());
- assertEquals("_doc", response.getResponses()[1].getType());
assertEquals("index", response.getResponses()[1].getIndex());
assertEquals(Collections.singletonMap("field", "value2"), response.getResponses()[1].getResponse().getSource());
}
}
- public void testMultiGetWithTypes() throws IOException {
+ public void testMultiGetWithIds() throws IOException {
BulkRequest bulk = new BulkRequest();
bulk.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
- bulk.add(new IndexRequest("index", "type", "id1").source("{\"field\":\"value1\"}", XContentType.JSON));
- bulk.add(new IndexRequest("index", "type", "id2").source("{\"field\":\"value2\"}", XContentType.JSON));
+ bulk.add(new IndexRequest("index").id("id1").source("{\"field\":\"value1\"}", XContentType.JSON));
+ bulk.add(new IndexRequest("index").id("id2").source("{\"field\":\"value2\"}", XContentType.JSON));
- highLevelClient().bulk(bulk, expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
MultiGetRequest multiGetRequest = new MultiGetRequest();
multiGetRequest.add("index", "id1");
- multiGetRequest.add("index", "type", "id2");
-
- MultiGetResponse response = execute(
- multiGetRequest,
- highLevelClient()::mget,
- highLevelClient()::mgetAsync,
- expectWarningsOnce(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE)
- );
- assertEquals(2, response.getResponses().length);
-
- GetResponse firstResponse = response.getResponses()[0].getResponse();
- assertEquals("index", firstResponse.getIndex());
- assertEquals("type", firstResponse.getType());
- assertEquals("id1", firstResponse.getId());
-
- GetResponse secondResponse = response.getResponses()[1].getResponse();
- assertEquals("index", secondResponse.getIndex());
- assertEquals("type", secondResponse.getType());
- assertEquals("id2", secondResponse.getId());
+ multiGetRequest.add("index", "id2");
}
public void testGetSource() throws IOException {
@@ -568,7 +476,7 @@ public void testGetSource() throws IOException {
);
assertEquals(RestStatus.NOT_FOUND, exception.status());
assertEquals(
- "OpenSearch exception [type=resource_not_found_exception, " + "reason=Document not found [index]/[_doc]/[does_not_exist]]",
+ "OpenSearch exception [type=resource_not_found_exception, " + "reason=Document not found [index]/[does_not_exist]]",
exception.getMessage()
);
}
@@ -622,7 +530,6 @@ public void testIndex() throws IOException {
assertEquals(RestStatus.CREATED, indexResponse.status());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
assertEquals("index", indexResponse.getIndex());
- assertEquals("_doc", indexResponse.getType());
assertTrue(Strings.hasLength(indexResponse.getId()));
assertEquals(1L, indexResponse.getVersion());
assertNotNull(indexResponse.getShardId());
@@ -642,7 +549,6 @@ public void testIndex() throws IOException {
IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
assertEquals(RestStatus.CREATED, indexResponse.status());
assertEquals("index", indexResponse.getIndex());
- assertEquals("_doc", indexResponse.getType());
assertEquals("id", indexResponse.getId());
assertEquals(1L, indexResponse.getVersion());
@@ -652,7 +558,6 @@ public void testIndex() throws IOException {
indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
assertEquals(RestStatus.OK, indexResponse.status());
assertEquals("index", indexResponse.getIndex());
- assertEquals("_doc", indexResponse.getType());
assertEquals("id", indexResponse.getId());
assertEquals(2L, indexResponse.getVersion());
@@ -710,7 +615,6 @@ public void testIndex() throws IOException {
IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
assertEquals(RestStatus.CREATED, indexResponse.status());
assertEquals("index", indexResponse.getIndex());
- assertEquals("_doc", indexResponse.getType());
assertEquals("external_version_type", indexResponse.getId());
assertEquals(12L, indexResponse.getVersion());
}
@@ -722,7 +626,6 @@ public void testIndex() throws IOException {
IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
assertEquals(RestStatus.CREATED, indexResponse.status());
assertEquals("index", indexResponse.getIndex());
- assertEquals("_doc", indexResponse.getType());
assertEquals("with_create_op_type", indexResponse.getId());
OpenSearchStatusException exception = expectThrows(
@@ -739,22 +642,6 @@ public void testIndex() throws IOException {
}
}
- public void testIndexWithTypes() throws IOException {
- final XContentType xContentType = randomFrom(XContentType.values());
- IndexRequest indexRequest = new IndexRequest("index", "some_type", "some_id");
- indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("test", "test").endObject());
- IndexResponse indexResponse = execute(
- indexRequest,
- highLevelClient()::index,
- highLevelClient()::indexAsync,
- expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
- );
- assertEquals(RestStatus.CREATED, indexResponse.status());
- assertEquals("index", indexResponse.getIndex());
- assertEquals("some_type", indexResponse.getType());
- assertEquals("some_id", indexResponse.getId());
- }
-
public void testUpdate() throws IOException {
{
UpdateRequest updateRequest = new UpdateRequest("index", "does_not_exist");
@@ -766,7 +653,7 @@ public void testUpdate() throws IOException {
);
assertEquals(RestStatus.NOT_FOUND, exception.status());
assertEquals(
- "OpenSearch exception [type=document_missing_exception, reason=[_doc][does_not_exist]: document missing]",
+ "OpenSearch exception [type=document_missing_exception, reason=[does_not_exist]: document missing]",
exception.getMessage()
);
}
@@ -891,7 +778,6 @@ public void testUpdate() throws IOException {
UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
assertEquals(RestStatus.CREATED, updateResponse.status());
assertEquals("index", updateResponse.getIndex());
- assertEquals("_doc", updateResponse.getType());
assertEquals("with_upsert", updateResponse.getId());
GetResult getResult = updateResponse.getGetResult();
assertEquals(1L, updateResponse.getVersion());
@@ -906,7 +792,6 @@ public void testUpdate() throws IOException {
UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
assertEquals(RestStatus.CREATED, updateResponse.status());
assertEquals("index", updateResponse.getIndex());
- assertEquals("_doc", updateResponse.getType());
assertEquals("with_doc_as_upsert", updateResponse.getId());
GetResult getResult = updateResponse.getGetResult();
assertEquals(1L, updateResponse.getVersion());
@@ -922,7 +807,6 @@ public void testUpdate() throws IOException {
UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
assertEquals(RestStatus.CREATED, updateResponse.status());
assertEquals("index", updateResponse.getIndex());
- assertEquals("_doc", updateResponse.getType());
assertEquals("with_scripted_upsert", updateResponse.getId());
GetResult getResult = updateResponse.getGetResult();
@@ -955,29 +839,6 @@ public void testUpdate() throws IOException {
}
}
- public void testUpdateWithTypes() throws IOException {
- IndexRequest indexRequest = new IndexRequest("index", "type", "id");
- indexRequest.source(singletonMap("field", "value"));
- IndexResponse indexResponse = execute(
- indexRequest,
- highLevelClient()::index,
- highLevelClient()::indexAsync,
- expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
- );
-
- UpdateRequest updateRequest = new UpdateRequest("index", "type", "id");
- updateRequest.doc(singletonMap("field", "updated"), randomFrom(XContentType.values()));
- UpdateResponse updateResponse = execute(
- updateRequest,
- highLevelClient()::update,
- highLevelClient()::updateAsync,
- expectWarningsOnce(RestUpdateAction.TYPES_DEPRECATION_MESSAGE)
- );
-
- assertEquals(RestStatus.OK, updateResponse.status());
- assertEquals(indexResponse.getVersion() + 1, updateResponse.getVersion());
- }
-
public void testBulk() throws IOException {
int nbItems = randomIntBetween(10, 100);
boolean[] errors = new boolean[nbItems];
@@ -1141,7 +1002,6 @@ private void validateBulkResponses(int nbItems, boolean[] errors, BulkResponse b
assertEquals(i, bulkItemResponse.getItemId());
assertEquals("index", bulkItemResponse.getIndex());
- assertEquals("_doc", bulkItemResponse.getType());
assertEquals(String.valueOf(i), bulkItemResponse.getId());
DocWriteRequest.OpType requestOpType = bulkRequest.requests().get(i).opType();
@@ -1167,7 +1027,6 @@ public void testUrlEncode() throws IOException {
indexRequest.source("field", "value");
IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
assertEquals(expectedIndex, indexResponse.getIndex());
- assertEquals("_doc", indexResponse.getType());
assertEquals("id#1", indexResponse.getId());
}
{
@@ -1175,7 +1034,6 @@ public void testUrlEncode() throws IOException {
GetResponse getResponse = highLevelClient().get(getRequest, RequestOptions.DEFAULT);
assertTrue(getResponse.isExists());
assertEquals(expectedIndex, getResponse.getIndex());
- assertEquals("_doc", getResponse.getType());
assertEquals("id#1", getResponse.getId());
}
@@ -1185,7 +1043,6 @@ public void testUrlEncode() throws IOException {
indexRequest.source("field", "value");
IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
assertEquals("index", indexResponse.getIndex());
- assertEquals("_doc", indexResponse.getType());
assertEquals(docId, indexResponse.getId());
}
{
@@ -1193,7 +1050,6 @@ public void testUrlEncode() throws IOException {
GetResponse getResponse = highLevelClient().get(getRequest, RequestOptions.DEFAULT);
assertTrue(getResponse.isExists());
assertEquals("index", getResponse.getIndex());
- assertEquals("_doc", getResponse.getType());
assertEquals(docId, getResponse.getId());
}
@@ -1209,7 +1065,6 @@ public void testParamsEncode() throws IOException {
indexRequest.routing(routing);
IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
assertEquals("index", indexResponse.getIndex());
- assertEquals("_doc", indexResponse.getType());
assertEquals("id", indexResponse.getId());
}
{
@@ -1217,7 +1072,6 @@ public void testParamsEncode() throws IOException {
GetResponse getResponse = highLevelClient().get(getRequest, RequestOptions.DEFAULT);
assertTrue(getResponse.isExists());
assertEquals("index", getResponse.getIndex());
- assertEquals("_doc", getResponse.getType());
assertEquals("id", getResponse.getId());
assertEquals(routing, getResponse.getField("_routing").getValue());
}
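The CRUD tests above drop every assertion on the document type along with the *WithTypes test variants. As a rough, hedged sketch of the typeless request shape the remaining tests exercise (the helper method, the client parameter, and the index/id values below are illustrative assumptions, not taken from the diff):

    import java.io.IOException;
    import org.opensearch.action.get.GetRequest;
    import org.opensearch.action.get.GetResponse;
    import org.opensearch.action.index.IndexRequest;
    import org.opensearch.action.index.IndexResponse;
    import org.opensearch.client.RequestOptions;
    import org.opensearch.client.RestHighLevelClient;
    import org.opensearch.common.xcontent.XContentType;

    // Sketch only: index and read back a document without a mapping type.
    static void indexAndGetWithoutTypes(RestHighLevelClient client) throws IOException {
        IndexRequest indexRequest = new IndexRequest("index")              // no type argument anymore
            .id("id")
            .source(XContentType.JSON, "field", "value");
        IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
        // The tests now assert only on index, id, version and result; getType() is no longer checked.
        GetResponse getResponse = client.get(new GetRequest("index", "id"), RequestOptions.DEFAULT);
    }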
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java
index d33abb0552776..69ce518173042 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java
@@ -46,7 +46,6 @@
import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
import org.opensearch.action.admin.indices.flush.FlushRequest;
import org.opensearch.action.admin.indices.flush.FlushResponse;
-import org.opensearch.action.admin.indices.flush.SyncedFlushRequest;
import org.opensearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.opensearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.opensearch.action.admin.indices.open.OpenIndexRequest;
@@ -126,15 +125,11 @@
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryBuilders;
-import org.opensearch.indices.flush.SyncedFlushService;
import org.opensearch.rest.RestStatus;
import org.opensearch.rest.action.admin.indices.RestCreateIndexAction;
-import org.opensearch.rest.action.admin.indices.RestGetFieldMappingAction;
import org.opensearch.rest.action.admin.indices.RestGetIndexTemplateAction;
import org.opensearch.rest.action.admin.indices.RestGetIndicesAction;
-import org.opensearch.rest.action.admin.indices.RestGetMappingAction;
import org.opensearch.rest.action.admin.indices.RestPutIndexTemplateAction;
-import org.opensearch.rest.action.admin.indices.RestPutMappingAction;
import org.opensearch.rest.action.admin.indices.RestRolloverIndexAction;
import java.io.IOException;
@@ -583,32 +578,6 @@ public void testPutMapping() throws IOException {
assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse));
}
- public void testPutMappingWithTypes() throws IOException {
- String indexName = "mapping_index";
- createIndex(indexName, Settings.EMPTY);
-
- org.opensearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest =
- new org.opensearch.action.admin.indices.mapping.put.PutMappingRequest(indexName);
- putMappingRequest.type("some_type");
-
- XContentBuilder mappingBuilder = JsonXContent.contentBuilder();
- mappingBuilder.startObject().startObject("properties").startObject("field");
- mappingBuilder.field("type", "text");
- mappingBuilder.endObject().endObject().endObject();
- putMappingRequest.source(mappingBuilder);
-
- AcknowledgedResponse putMappingResponse = execute(
- putMappingRequest,
- highLevelClient().indices()::putMapping,
- highLevelClient().indices()::putMappingAsync,
- expectWarningsOnce(RestPutMappingAction.TYPES_DEPRECATION_MESSAGE)
- );
- assertTrue(putMappingResponse.isAcknowledged());
-
- Map<String, Object> getIndexResponse = getAsMap(indexName);
- assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse));
- }
-
public void testGetMapping() throws IOException {
String indexName = "test";
createIndex(indexName, Settings.EMPTY);
@@ -648,47 +617,6 @@ public void testGetMapping() throws IOException {
assertThat(mappings, equalTo(expected));
}
- public void testGetMappingWithTypes() throws IOException {
- String indexName = "test";
- createIndex(indexName, Settings.EMPTY);
-
- PutMappingRequest putMappingRequest = new PutMappingRequest(indexName);
- XContentBuilder mappingBuilder = JsonXContent.contentBuilder();
- mappingBuilder.startObject().startObject("properties").startObject("field");
- mappingBuilder.field("type", "text");
- mappingBuilder.endObject().endObject().endObject();
- putMappingRequest.source(mappingBuilder);
-
- AcknowledgedResponse putMappingResponse = execute(
- putMappingRequest,
- highLevelClient().indices()::putMapping,
- highLevelClient().indices()::putMappingAsync
- );
- assertTrue(putMappingResponse.isAcknowledged());
-
- Map<String, Object> getIndexResponse = getAsMap(indexName);
- assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse));
-
- org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest request =
- new org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest().indices(indexName);
-
- org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse getMappingsResponse = execute(
- request,
- highLevelClient().indices()::getMapping,
- highLevelClient().indices()::getMappingAsync,
- expectWarningsOnce(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE)
- );
-
- Map<String, Object> mappings = getMappingsResponse.getMappings().get(indexName).get("_doc").sourceAsMap();
- Map<String, String> type = new HashMap<>();
- type.put("type", "text");
- Map<String, Object> field = new HashMap<>();
- field.put("field", type);
- Map<String, Object> expected = new HashMap<>();
- expected.put("properties", field);
- assertThat(mappings, equalTo(expected));
- }
-
public void testGetFieldMapping() throws IOException {
String indexName = "test";
createIndex(indexName, Settings.EMPTY);
@@ -725,45 +653,6 @@ public void testGetFieldMapping() throws IOException {
assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metadata)));
}
- public void testGetFieldMappingWithTypes() throws IOException {
- String indexName = "test";
- createIndex(indexName, Settings.EMPTY);
-
- PutMappingRequest putMappingRequest = new PutMappingRequest(indexName);
- XContentBuilder mappingBuilder = JsonXContent.contentBuilder();
- mappingBuilder.startObject().startObject("properties").startObject("field");
- mappingBuilder.field("type", "text");
- mappingBuilder.endObject().endObject().endObject();
- putMappingRequest.source(mappingBuilder);
-
- AcknowledgedResponse putMappingResponse = execute(
- putMappingRequest,
- highLevelClient().indices()::putMapping,
- highLevelClient().indices()::putMappingAsync
- );
- assertTrue(putMappingResponse.isAcknowledged());
-
- org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest =
- new org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest().indices(indexName).types("_doc").fields("field");
-
- org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse getFieldMappingsResponse = execute(
- getFieldMappingsRequest,
- highLevelClient().indices()::getFieldMapping,
- highLevelClient().indices()::getFieldMappingAsync,
- expectWarningsOnce(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE)
- );
-
- final Map<String, org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata> fieldMappingMap =
- getFieldMappingsResponse.mappings().get(indexName).get("_doc");
-
- final org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata metadata =
- new org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata(
- "field",
- new BytesArray("{\"field\":{\"type\":\"text\"}}")
- );
- assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metadata)));
- }
-
public void testDeleteIndex() throws IOException {
{
// Delete index if exists
@@ -1080,39 +969,6 @@ public void testFlush() throws IOException {
}
}
- public void testSyncedFlush() throws IOException {
- {
- String index = "index";
- Settings settings = Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build();
- createIndex(index, settings);
- SyncedFlushRequest syncedFlushRequest = new SyncedFlushRequest(index);
- SyncedFlushResponse flushResponse = execute(
- syncedFlushRequest,
- highLevelClient().indices()::flushSynced,
- highLevelClient().indices()::flushSyncedAsync,
- expectWarningsOnce(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE)
- );
- assertThat(flushResponse.totalShards(), equalTo(1));
- assertThat(flushResponse.successfulShards(), equalTo(1));
- assertThat(flushResponse.failedShards(), equalTo(0));
- }
- {
- String nonExistentIndex = "non_existent_index";
- assertFalse(indexExists(nonExistentIndex));
- SyncedFlushRequest syncedFlushRequest = new SyncedFlushRequest(nonExistentIndex);
- OpenSearchException exception = expectThrows(
- OpenSearchException.class,
- () -> execute(
- syncedFlushRequest,
- highLevelClient().indices()::flushSynced,
- highLevelClient().indices()::flushSyncedAsync,
- expectWarningsOnce(SyncedFlushService.SYNCED_FLUSH_DEPRECATION_MESSAGE)
- )
- );
- assertEquals(RestStatus.NOT_FOUND, exception.status());
- }
- }
-
public void testClearCache() throws IOException {
{
String index = "index";
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java
index 28728a95ae976..0ea2280b386eb 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java
@@ -45,7 +45,6 @@
import org.opensearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
import org.opensearch.action.admin.indices.flush.FlushRequest;
-import org.opensearch.action.admin.indices.flush.SyncedFlushRequest;
import org.opensearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.opensearch.action.admin.indices.open.OpenIndexRequest;
import org.opensearch.action.admin.indices.refresh.RefreshRequest;
@@ -750,33 +749,6 @@ public void testFlush() {
Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
}
- public void testSyncedFlush() {
- String[] indices = OpenSearchTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
- SyncedFlushRequest syncedFlushRequest;
- if (OpenSearchTestCase.randomBoolean()) {
- syncedFlushRequest = new SyncedFlushRequest(indices);
- } else {
- syncedFlushRequest = new SyncedFlushRequest();
- syncedFlushRequest.indices(indices);
- }
- Map<String, String> expectedParams = new HashMap<>();
- RequestConvertersTests.setRandomIndicesOptions(
- syncedFlushRequest::indicesOptions,
- syncedFlushRequest::indicesOptions,
- expectedParams
- );
- Request request = IndicesRequestConverters.flushSynced(syncedFlushRequest);
- StringJoiner endpoint = new StringJoiner("/", "/", "");
- if (indices != null && indices.length > 0) {
- endpoint.add(String.join(",", indices));
- }
- endpoint.add("_flush/synced");
- Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
- Assert.assertThat(request.getParameters(), equalTo(expectedParams));
- Assert.assertThat(request.getEntity(), nullValue());
- Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
- }
-
public void testForceMerge() {
String[] indices = OpenSearchTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
ForceMergeRequest forceMergeRequest;
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java
index 51b0ce00a14cd..32c6cde0725b4 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java
@@ -172,10 +172,6 @@ public void testGet() {
getAndExistsTest(RequestConverters::get, HttpGet.METHOD_NAME);
}
- public void testGetWithType() {
- getAndExistsWithTypeTest(RequestConverters::get, HttpGet.METHOD_NAME);
- }
-
public void testSourceExists() throws IOException {
doTestSourceExists((index, id) -> new GetSourceRequest(index, id));
}
@@ -221,13 +217,7 @@ private static void doTestSourceExists(BiFunction requestConverter, String method) {
String index = randomAlphaOfLengthBetween(3, 10);
String id = randomAlphaOfLengthBetween(3, 10);
@@ -435,18 +409,6 @@ private static void getAndExistsTest(Function<GetRequest, Request> requestConver
assertEquals(method, request.getMethod());
}
- private static void getAndExistsWithTypeTest(Function<GetRequest, Request> requestConverter, String method) {
- String index = randomAlphaOfLengthBetween(3, 10);
- String type = randomAlphaOfLengthBetween(3, 10);
- String id = randomAlphaOfLengthBetween(3, 10);
- GetRequest getRequest = new GetRequest(index, type, id);
-
- Request request = requestConverter.apply(getRequest);
- assertEquals("/" + index + "/" + type + "/" + id, request.getEndpoint());
- assertNull(request.getEntity());
- assertEquals(method, request.getMethod());
- }
-
public void testReindex() throws IOException {
ReindexRequest reindexRequest = new ReindexRequest();
reindexRequest.setSourceIndices("source_idx");
@@ -468,15 +430,9 @@ public void testReindex() throws IOException {
);
reindexRequest.setRemoteInfo(remoteInfo);
}
- if (randomBoolean()) {
- reindexRequest.setSourceDocTypes("doc", "tweet");
- }
if (randomBoolean()) {
reindexRequest.setSourceBatchSize(randomInt(100));
}
- if (randomBoolean()) {
- reindexRequest.setDestDocType("tweet_and_doc");
- }
if (randomBoolean()) {
reindexRequest.setDestOpType("create");
}
@@ -536,9 +492,6 @@ public void testUpdateByQuery() throws IOException {
UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest();
updateByQueryRequest.indices(randomIndicesNames(1, 5));
Map<String, String> expectedParams = new HashMap<>();
- if (randomBoolean()) {
- updateByQueryRequest.setDocTypes(generateRandomStringArray(5, 5, false, false));
- }
if (randomBoolean()) {
int batchSize = randomInt(100);
updateByQueryRequest.setBatchSize(batchSize);
@@ -600,9 +553,6 @@ public void testUpdateByQuery() throws IOException {
Request request = RequestConverters.updateByQuery(updateByQueryRequest);
StringJoiner joiner = new StringJoiner("/", "/", "");
joiner.add(String.join(",", updateByQueryRequest.indices()));
- if (updateByQueryRequest.getDocTypes().length > 0) {
- joiner.add(String.join(",", updateByQueryRequest.getDocTypes()));
- }
joiner.add("_update_by_query");
assertEquals(joiner.toString(), request.getEndpoint());
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
@@ -614,9 +564,6 @@ public void testDeleteByQuery() throws IOException {
DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest();
deleteByQueryRequest.indices(randomIndicesNames(1, 5));
Map<String, String> expectedParams = new HashMap<>();
- if (randomBoolean()) {
- deleteByQueryRequest.setDocTypes(generateRandomStringArray(5, 5, false, false));
- }
if (randomBoolean()) {
int batchSize = randomInt(100);
deleteByQueryRequest.setBatchSize(batchSize);
@@ -671,9 +618,6 @@ public void testDeleteByQuery() throws IOException {
Request request = RequestConverters.deleteByQuery(deleteByQueryRequest);
StringJoiner joiner = new StringJoiner("/", "/", "");
joiner.add(String.join(",", deleteByQueryRequest.indices()));
- if (deleteByQueryRequest.getDocTypes().length > 0) {
- joiner.add(String.join(",", deleteByQueryRequest.getDocTypes()));
- }
joiner.add("_delete_by_query");
assertEquals(joiner.toString(), request.getEndpoint());
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
@@ -793,49 +737,6 @@ public void testIndex() throws IOException {
}
}
- public void testIndexWithType() throws IOException {
- String index = randomAlphaOfLengthBetween(3, 10);
- String type = randomAlphaOfLengthBetween(3, 10);
- IndexRequest indexRequest = new IndexRequest(index, type);
- String id = randomBoolean() ? randomAlphaOfLengthBetween(3, 10) : null;
- indexRequest.id(id);
-
- String method = HttpPost.METHOD_NAME;
- if (id != null) {
- method = HttpPut.METHOD_NAME;
- if (randomBoolean()) {
- indexRequest.opType(DocWriteRequest.OpType.CREATE);
- }
- }
- XContentType xContentType = randomFrom(XContentType.values());
- int nbFields = randomIntBetween(0, 10);
- try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
- builder.startObject();
- for (int i = 0; i < nbFields; i++) {
- builder.field("field_" + i, i);
- }
- builder.endObject();
- indexRequest.source(builder);
- }
-
- Request request = RequestConverters.index(indexRequest);
- if (indexRequest.opType() == DocWriteRequest.OpType.CREATE) {
- assertEquals("/" + index + "/" + type + "/" + id + "/_create", request.getEndpoint());
- } else if (id != null) {
- assertEquals("/" + index + "/" + type + "/" + id, request.getEndpoint());
- } else {
- assertEquals("/" + index + "/" + type, request.getEndpoint());
- }
- assertEquals(method, request.getMethod());
-
- HttpEntity entity = request.getEntity();
- assertTrue(entity instanceof NByteArrayEntity);
- assertEquals(indexRequest.getContentType().mediaTypeWithoutParameters(), entity.getContentType().getValue());
- try (XContentParser parser = createParser(xContentType.xContent(), entity.getContent())) {
- assertEquals(nbFields, parser.map().size());
- }
- }
-
public void testUpdate() throws IOException {
XContentType xContentType = randomFrom(XContentType.values());
@@ -944,23 +845,6 @@ private static void setRandomIfSeqNoAndTerm(DocWriteRequest<?> request, Map<String, String> expectedParams) {
UpdateRequest updateRequest = new UpdateRequest();
@@ -1055,7 +939,6 @@ public void testBulk() throws IOException {
assertEquals(originalRequest.opType(), parsedRequest.opType());
assertEquals(originalRequest.index(), parsedRequest.index());
- assertEquals(originalRequest.type(), parsedRequest.type());
assertEquals(originalRequest.id(), parsedRequest.id());
assertEquals(originalRequest.routing(), parsedRequest.routing());
assertEquals(originalRequest.version(), parsedRequest.version());
@@ -1191,10 +1074,6 @@ public void testSearch() throws Exception {
if (Strings.hasLength(index)) {
endpoint.add(index);
}
- String type = String.join(",", searchRequest.types());
- if (Strings.hasLength(type)) {
- endpoint.add(type);
- }
endpoint.add(searchEndpoint);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertEquals(endpoint.toString(), request.getEndpoint());
@@ -1204,14 +1083,6 @@ public void testSearch() throws Exception {
public static SearchRequest createTestSearchRequest(String[] indices, Map<String, String> expectedParams) {
SearchRequest searchRequest = new SearchRequest(indices);
-
- int numTypes = randomIntBetween(0, 5);
- String[] types = new String[numTypes];
- for (int i = 0; i < numTypes; i++) {
- types[i] = "type-" + randomAlphaOfLengthBetween(2, 5);
- }
- searchRequest.types(types);
-
setRandomSearchParams(searchRequest, expectedParams);
setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams);
@@ -1278,7 +1149,6 @@ public static SearchSourceBuilder createTestSearchSourceBuilder() {
public void testSearchNullIndicesAndTypes() {
expectThrows(NullPointerException.class, () -> new SearchRequest((String[]) null));
expectThrows(NullPointerException.class, () -> new SearchRequest().indices((String[]) null));
- expectThrows(NullPointerException.class, () -> new SearchRequest().types((String[]) null));
}
public void testCountNotNullSource() throws IOException {
@@ -1293,14 +1163,6 @@ public void testCountNotNullSource() throws IOException {
public void testCount() throws Exception {
String[] indices = randomIndicesNames(0, 5);
CountRequest countRequest = new CountRequest(indices);
-
- int numTypes = randomIntBetween(0, 5);
- String[] types = new String[numTypes];
- for (int i = 0; i < numTypes; i++) {
- types[i] = "type-" + randomAlphaOfLengthBetween(2, 5);
- }
- countRequest.types(types);
-
Map<String, String> expectedParams = new HashMap<>();
setRandomCountParams(countRequest, expectedParams);
setRandomIndicesOptions(countRequest::indicesOptions, countRequest::indicesOptions, expectedParams);
@@ -1317,10 +1179,6 @@ public void testCount() throws Exception {
if (Strings.hasLength(index)) {
endpoint.add(index);
}
- String type = String.join(",", types);
- if (Strings.hasLength(type)) {
- endpoint.add(type);
- }
endpoint.add("_count");
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertEquals(endpoint.toString(), request.getEndpoint());
@@ -1328,12 +1186,6 @@ public void testCount() throws Exception {
assertToXContentBody(countRequest, request.getEntity());
}
- public void testCountNullIndicesAndTypes() {
- expectThrows(NullPointerException.class, () -> new CountRequest((String[]) null));
- expectThrows(NullPointerException.class, () -> new CountRequest().indices((String[]) null));
- expectThrows(NullPointerException.class, () -> new CountRequest().types((String[]) null));
- }
-
private static void setRandomCountParams(CountRequest countRequest, Map<String, String> expectedParams) {
if (randomBoolean()) {
countRequest.routing(randomAlphaOfLengthBetween(3, 10));
@@ -1416,7 +1268,6 @@ public void testMultiSearch() throws IOException {
null,
null,
null,
- null,
xContentRegistry(),
true,
deprecationLogger
@@ -1602,21 +1453,6 @@ public void testExplain() throws IOException {
assertToXContentBody(explainRequest, request.getEntity());
}
- public void testExplainWithType() throws IOException {
- String index = randomAlphaOfLengthBetween(3, 10);
- String type = randomAlphaOfLengthBetween(3, 10);
- String id = randomAlphaOfLengthBetween(3, 10);
-
- ExplainRequest explainRequest = new ExplainRequest(index, type, id);
- explainRequest.query(QueryBuilders.termQuery(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10)));
-
- Request request = RequestConverters.explain(explainRequest);
- assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/" + index + "/" + type + "/" + id + "/_explain", request.getEndpoint());
-
- assertToXContentBody(explainRequest, request.getEntity());
- }
-
public void testTermVectors() throws IOException {
String index = randomAlphaOfLengthBetween(3, 10);
String id = randomAlphaOfLengthBetween(3, 10);
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java
index 22ff3aebae9ac..19e287fb91be5 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java
@@ -64,7 +64,6 @@
import org.opensearch.join.aggregations.Children;
import org.opensearch.join.aggregations.ChildrenAggregationBuilder;
import org.opensearch.rest.RestStatus;
-import org.opensearch.rest.action.document.RestIndexAction;
import org.opensearch.script.Script;
import org.opensearch.script.ScriptType;
import org.opensearch.script.mustache.MultiSearchTemplateRequest;
@@ -125,24 +124,19 @@ public class SearchIT extends OpenSearchRestHighLevelClientTestCase {
@Before
public void indexDocuments() throws IOException {
{
- Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/type/1");
- doc1.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
+ Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/_doc/1");
doc1.setJsonEntity("{\"type\":\"type1\", \"id\":1, \"num\":10, \"num2\":50}");
client().performRequest(doc1);
- Request doc2 = new Request(HttpPut.METHOD_NAME, "/index/type/2");
- doc2.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
+ Request doc2 = new Request(HttpPut.METHOD_NAME, "/index/_doc/2");
doc2.setJsonEntity("{\"type\":\"type1\", \"id\":2, \"num\":20, \"num2\":40}");
client().performRequest(doc2);
- Request doc3 = new Request(HttpPut.METHOD_NAME, "/index/type/3");
- doc3.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
+ Request doc3 = new Request(HttpPut.METHOD_NAME, "/index/_doc/3");
doc3.setJsonEntity("{\"type\":\"type1\", \"id\":3, \"num\":50, \"num2\":35}");
client().performRequest(doc3);
- Request doc4 = new Request(HttpPut.METHOD_NAME, "/index/type/4");
- doc4.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
+ Request doc4 = new Request(HttpPut.METHOD_NAME, "/index/_doc/4");
doc4.setJsonEntity("{\"type\":\"type2\", \"id\":4, \"num\":100, \"num2\":10}");
client().performRequest(doc4);
- Request doc5 = new Request(HttpPut.METHOD_NAME, "/index/type/5");
- doc5.setOptions(expectWarningsOnce(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
+ Request doc5 = new Request(HttpPut.METHOD_NAME, "/index/_doc/5");
doc5.setJsonEntity("{\"type\":\"type2\", \"id\":5, \"num\":100, \"num2\":10}");
client().performRequest(doc5);
}
@@ -241,13 +235,11 @@ public void testSearchNoQuery() throws IOException {
assertEquals(5, searchResponse.getHits().getHits().length);
for (SearchHit searchHit : searchResponse.getHits().getHits()) {
assertEquals("index", searchHit.getIndex());
- assertEquals("type", searchHit.getType());
assertThat(Integer.valueOf(searchHit.getId()), both(greaterThan(0)).and(lessThan(6)));
assertEquals(1.0f, searchHit.getScore(), 0);
assertEquals(-1L, searchHit.getVersion());
assertNotNull(searchHit.getSourceAsMap());
assertEquals(4, searchHit.getSourceAsMap().size());
- assertTrue(searchHit.getSourceAsMap().containsKey("type"));
assertTrue(searchHit.getSourceAsMap().containsKey("num"));
assertTrue(searchHit.getSourceAsMap().containsKey("num2"));
}
@@ -266,7 +258,6 @@ public void testSearchMatchQuery() throws IOException {
assertThat(searchResponse.getHits().getMaxScore(), greaterThan(0f));
SearchHit searchHit = searchResponse.getHits().getHits()[0];
assertEquals("index", searchHit.getIndex());
- assertEquals("type", searchHit.getType());
assertEquals("1", searchHit.getId());
assertThat(searchHit.getScore(), greaterThan(0f));
assertEquals(-1L, searchHit.getVersion());
@@ -1143,7 +1134,6 @@ public void testExplain() throws IOException {
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
assertThat(explainResponse.getIndex(), equalTo("index1"));
- assertThat(explainResponse.getType(), equalTo("_doc"));
assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
assertTrue(explainResponse.isExists());
assertTrue(explainResponse.isMatch());
@@ -1158,7 +1148,6 @@ public void testExplain() throws IOException {
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
assertThat(explainResponse.getIndex(), equalTo("index1"));
- assertThat(explainResponse.getType(), equalTo("_doc"));
assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
assertTrue(explainResponse.isExists());
assertTrue(explainResponse.isMatch());
@@ -1173,7 +1162,6 @@ public void testExplain() throws IOException {
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
assertThat(explainResponse.getIndex(), equalTo("index1"));
- assertThat(explainResponse.getType(), equalTo("_doc"));
assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
assertTrue(explainResponse.isExists());
assertFalse(explainResponse.isMatch());
@@ -1189,7 +1177,6 @@ public void testExplain() throws IOException {
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
assertThat(explainResponse.getIndex(), equalTo("index1"));
- assertThat(explainResponse.getType(), equalTo("_doc"));
assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
assertTrue(explainResponse.isExists());
assertFalse(explainResponse.isMatch());
@@ -1221,7 +1208,6 @@ public void testExplainNonExistent() throws IOException {
ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
assertThat(explainResponse.getIndex(), equalTo("index1"));
- assertThat(explainResponse.getType(), equalTo("_doc"));
assertThat(explainResponse.getId(), equalTo("999"));
assertFalse(explainResponse.isExists());
assertFalse(explainResponse.isMatch());
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SyncedFlushResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/SyncedFlushResponseTests.java
deleted file mode 100644
index e56e78d5d9caf..0000000000000
--- a/client/rest-high-level/src/test/java/org/opensearch/client/SyncedFlushResponseTests.java
+++ /dev/null
@@ -1,258 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.client;
-
-import com.carrotsearch.hppc.ObjectIntHashMap;
-import com.carrotsearch.hppc.ObjectIntMap;
-import org.opensearch.cluster.routing.ShardRouting;
-import org.opensearch.cluster.routing.ShardRoutingState;
-import org.opensearch.cluster.routing.TestShardRouting;
-import org.opensearch.common.bytes.BytesReference;
-import org.opensearch.common.xcontent.DeprecationHandler;
-import org.opensearch.common.xcontent.ToXContent;
-import org.opensearch.common.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentParser;
-import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.index.shard.ShardId;
-import org.opensearch.indices.flush.ShardsSyncedFlushResult;
-import org.opensearch.indices.flush.SyncedFlushService;
-import org.opensearch.test.OpenSearchTestCase;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-public class SyncedFlushResponseTests extends OpenSearchTestCase {
-
- public void testXContentSerialization() throws IOException {
- final XContentType xContentType = randomFrom(XContentType.values());
- TestPlan plan = createTestPlan();
-
- XContentBuilder serverResponsebuilder = XContentBuilder.builder(xContentType.xContent());
- assertNotNull(plan.result);
- serverResponsebuilder.startObject();
- plan.result.toXContent(serverResponsebuilder, ToXContent.EMPTY_PARAMS);
- serverResponsebuilder.endObject();
- XContentBuilder clientResponsebuilder = XContentBuilder.builder(xContentType.xContent());
- assertNotNull(plan.result);
- plan.clientResult.toXContent(clientResponsebuilder, ToXContent.EMPTY_PARAMS);
- Map<String, Object> serverContentMap = convertFailureListToSet(
- serverResponsebuilder.generator()
- .contentType()
- .xContent()
- .createParser(
- xContentRegistry(),
- DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
- BytesReference.bytes(serverResponsebuilder).streamInput()
- )
- .map()
- );
- Map<String, Object> clientContentMap = convertFailureListToSet(
- clientResponsebuilder.generator()
- .contentType()
- .xContent()
- .createParser(
- xContentRegistry(),
- DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
- BytesReference.bytes(clientResponsebuilder).streamInput()
- )
- .map()
- );
- assertEquals(serverContentMap, clientContentMap);
- }
-
- public void testXContentDeserialization() throws IOException {
- final XContentType xContentType = randomFrom(XContentType.values());
- TestPlan plan = createTestPlan();
- XContentBuilder builder = XContentBuilder.builder(xContentType.xContent());
- builder.startObject();
- plan.result.toXContent(builder, ToXContent.EMPTY_PARAMS);
- builder.endObject();
- XContentParser parser = builder.generator()
- .contentType()
- .xContent()
- .createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(builder).streamInput());
- SyncedFlushResponse originalResponse = plan.clientResult;
- SyncedFlushResponse parsedResponse = SyncedFlushResponse.fromXContent(parser);
- assertNotNull(parsedResponse);
- assertShardCounts(originalResponse.getShardCounts(), parsedResponse.getShardCounts());
- for (Map.Entry<String, SyncedFlushResponse.IndexResult> entry : originalResponse.getIndexResults().entrySet()) {
- String index = entry.getKey();
- SyncedFlushResponse.IndexResult responseResult = entry.getValue();
- SyncedFlushResponse.IndexResult parsedResult = parsedResponse.getIndexResults().get(index);
- assertNotNull(responseResult);
- assertNotNull(parsedResult);
- assertShardCounts(responseResult.getShardCounts(), parsedResult.getShardCounts());
- assertEquals(responseResult.failures().size(), parsedResult.failures().size());
- for (SyncedFlushResponse.ShardFailure responseShardFailure : responseResult.failures()) {
- assertTrue(containsFailure(parsedResult.failures(), responseShardFailure));
- }
- }
- }
-
- static class TestPlan {
- SyncedFlushResponse.ShardCounts totalCounts;
- Map<String, SyncedFlushResponse.ShardCounts> countsPerIndex = new HashMap<>();
- ObjectIntMap<String> expectedFailuresPerIndex = new ObjectIntHashMap<>();
- org.opensearch.action.admin.indices.flush.SyncedFlushResponse result;
- SyncedFlushResponse clientResult;
- }
-
- TestPlan createTestPlan() throws IOException {
- final TestPlan testPlan = new TestPlan();
- final Map<String, List<ShardsSyncedFlushResult>> indicesResults = new HashMap<>();
- Map<String, SyncedFlushResponse.IndexResult> indexResults = new HashMap<>();
- final XContentType xContentType = randomFrom(XContentType.values());
- final int indexCount = randomIntBetween(1, 10);
- int totalShards = 0;
- int totalSuccessful = 0;
- int totalFailed = 0;
- for (int i = 0; i < indexCount; i++) {
- final String index = "index_" + i;
- int shards = randomIntBetween(1, 4);
- int replicas = randomIntBetween(0, 2);
- int successful = 0;
- int failed = 0;
- int failures = 0;
- List<ShardsSyncedFlushResult> shardsResults = new ArrayList<>();
- List<SyncedFlushResponse.ShardFailure> shardFailures = new ArrayList<>();
- for (int shard = 0; shard < shards; shard++) {
- final ShardId shardId = new ShardId(index, "_na_", shard);
- if (randomInt(5) < 2) {
- // total shard failure
- failed += replicas + 1;
- failures++;
- shardsResults.add(new ShardsSyncedFlushResult(shardId, replicas + 1, "simulated total failure"));
- shardFailures.add(new SyncedFlushResponse.ShardFailure(shardId.id(), "simulated total failure", new HashMap<>()));
- } else {
- Map<ShardRouting, SyncedFlushService.ShardSyncedFlushResponse> shardResponses = new HashMap<>();
- for (int copy = 0; copy < replicas + 1; copy++) {
- final ShardRouting shardRouting = TestShardRouting.newShardRouting(
- index,
- shard,
- "node_" + shardId + "_" + copy,
- null,
- copy == 0,
- ShardRoutingState.STARTED
- );
- if (randomInt(5) < 2) {
- // shard copy failure
- failed++;
- failures++;
- shardResponses.put(shardRouting, new SyncedFlushService.ShardSyncedFlushResponse("copy failure " + shardId));
- // Building the shardRouting map here.
- XContentBuilder builder = XContentBuilder.builder(xContentType.xContent());
- Map<String, Object> routing = shardRouting.toXContent(builder, ToXContent.EMPTY_PARAMS)
- .generator()
- .contentType()
- .xContent()
- .createParser(
- xContentRegistry(),
- DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
- BytesReference.bytes(builder).streamInput()
- )
- .map();
- shardFailures.add(new SyncedFlushResponse.ShardFailure(shardId.id(), "copy failure " + shardId, routing));
- } else {
- successful++;
- shardResponses.put(shardRouting, new SyncedFlushService.ShardSyncedFlushResponse((String) null));
- }
- }
- shardsResults.add(new ShardsSyncedFlushResult(shardId, "_sync_id_" + shard, replicas + 1, shardResponses));
- }
- }
- indicesResults.put(index, shardsResults);
- indexResults.put(index, new SyncedFlushResponse.IndexResult(shards * (replicas + 1), successful, failed, shardFailures));
- testPlan.countsPerIndex.put(index, new SyncedFlushResponse.ShardCounts(shards * (replicas + 1), successful, failed));
- testPlan.expectedFailuresPerIndex.put(index, failures);
- totalFailed += failed;
- totalShards += shards * (replicas + 1);
- totalSuccessful += successful;
- }
- testPlan.result = new org.opensearch.action.admin.indices.flush.SyncedFlushResponse(indicesResults);
- testPlan.totalCounts = new SyncedFlushResponse.ShardCounts(totalShards, totalSuccessful, totalFailed);
- testPlan.clientResult = new SyncedFlushResponse(
- new SyncedFlushResponse.ShardCounts(totalShards, totalSuccessful, totalFailed),
- indexResults
- );
- return testPlan;
- }
-
- public boolean containsFailure(List<SyncedFlushResponse.ShardFailure> failures, SyncedFlushResponse.ShardFailure origFailure) {
- for (SyncedFlushResponse.ShardFailure failure : failures) {
- if (failure.getShardId() == origFailure.getShardId()
- && failure.getFailureReason().equals(origFailure.getFailureReason())
- && failure.getRouting().equals(origFailure.getRouting())) {
- return true;
- }
- }
- return false;
- }
-
- public void assertShardCounts(SyncedFlushResponse.ShardCounts first, SyncedFlushResponse.ShardCounts second) {
- if (first == null) {
- assertNull(second);
- } else {
- assertTrue(first.equals(second));
- }
- }
-
- public Map<String, Object> convertFailureListToSet(Map<String, Object> input) {
- Map<String, Object> retMap = new HashMap<>();
- for (Map.Entry<String, Object> entry : input.entrySet()) {
- if (entry.getKey().equals(SyncedFlushResponse.SHARDS_FIELD)) {
- retMap.put(entry.getKey(), entry.getValue());
- } else {
- // This was an index entry.
- @SuppressWarnings("unchecked")
- Map<String, Object> indexResult = (Map<String, Object>) entry.getValue();
- Map<String, Object> retResult = new HashMap<>();
- for (Map.Entry<String, Object> entry2 : indexResult.entrySet()) {
- if (entry2.getKey().equals(SyncedFlushResponse.IndexResult.FAILURES_FIELD)) {
- @SuppressWarnings("unchecked")
- List<Object> failures = (List<Object>) entry2.getValue();
- Set<Object> retSet = new HashSet<>(failures);
- retResult.put(entry.getKey(), retSet);
- } else {
- retResult.put(entry2.getKey(), entry2.getValue());
- }
- }
- retMap.put(entry.getKey(), retResult);
- }
- }
- return retMap;
- }
-}
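Synced flush support is removed end to end in the high-level client: the request converter, the client-side SyncedFlushResponse, and this entire test file go away. As a minimal sketch of the plain flush that remains available (assuming an already-configured RestHighLevelClient passed in as client; error handling omitted):

    import java.io.IOException;
    import org.opensearch.action.admin.indices.flush.FlushRequest;
    import org.opensearch.action.admin.indices.flush.FlushResponse;
    import org.opensearch.client.RequestOptions;
    import org.opensearch.client.RestHighLevelClient;

    // Sketch only: an ordinary flush through the high-level client.
    static void flushIndex(RestHighLevelClient client) throws IOException {
        FlushRequest flushRequest = new FlushRequest("index");   // the no-arg constructor targets all indices
        FlushResponse flushResponse = client.indices().flush(flushRequest, RequestOptions.DEFAULT);
        int successful = flushResponse.getSuccessfulShards();    // shard counts replace the per-index result
        int failed = flushResponse.getFailedShards();            // map that SyncedFlushResponse used to expose
    }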
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/TasksIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/TasksIT.java
index 0db8ee4406c8c..d987e786fff76 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/TasksIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/TasksIT.java
@@ -117,7 +117,7 @@ public void testGetValidTask() throws Exception {
}
org.opensearch.tasks.TaskInfo info = taskResponse.getTaskInfo();
assertTrue(info.isCancellable());
- assertEquals("reindex from [source1] to [dest][_doc]", info.getDescription());
+ assertEquals("reindex from [source1] to [dest]", info.getDescription());
assertEquals("indices:data/write/reindex", info.getAction());
if (taskResponse.isCompleted() == false) {
assertBusy(checkTaskCompletionStatus(client(), taskId));
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/core/TermVectorsResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/core/TermVectorsResponseTests.java
index 11a6aeb6dbe47..33b82c10d8873 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/core/TermVectorsResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/core/TermVectorsResponseTests.java
@@ -59,7 +59,6 @@ public void testFromXContent() throws IOException {
static void toXContent(TermVectorsResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
builder.field("_index", response.getIndex());
- builder.field("_type", response.getType());
if (response.getId() != null) {
builder.field("_id", response.getId());
}
@@ -130,7 +129,6 @@ private static void toXContent(TermVectorsResponse.TermVector tv, XContentBuilde
static TermVectorsResponse createTestInstance() {
String index = randomAlphaOfLength(5);
- String type = randomAlphaOfLength(5);
String id = String.valueOf(randomIntBetween(1, 100));
long version = randomNonNegativeLong();
long tookInMillis = randomNonNegativeLong();
@@ -154,7 +152,7 @@ static TermVectorsResponse createTestInstance() {
);
}
}
- TermVectorsResponse tvresponse = new TermVectorsResponse(index, type, id, version, found, tookInMillis, tvList);
+ TermVectorsResponse tvresponse = new TermVectorsResponse(index, id, version, found, tookInMillis, tvList);
return tvresponse;
}
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java
index 67df99d9d7c08..959c5a827f143 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java
@@ -1719,9 +1719,8 @@ public void testTermVectors() throws Exception {
// tag::term-vectors-response
String index = response.getIndex(); // <1>
- String type = response.getType(); // <2>
- String id = response.getId(); // <3>
- boolean found = response.getFound(); // <4>
+ String id = response.getId(); // <2>
+ boolean found = response.getFound(); // <3>
// end::term-vectors-response
if (response.getTermVectorsList() != null) {
@@ -2051,7 +2050,6 @@ private MultiGetItemResponse unwrapAndAssertExample(MultiGetResponse response) {
assertThat(response.getResponses(), arrayWithSize(1));
MultiGetItemResponse item = response.getResponses()[0];
assertEquals("index", item.getIndex());
- assertEquals("_doc", item.getType());
assertEquals("example_id", item.getId());
return item;
}
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java
index ad2b0d1e603bb..3fbe7f63b09a2 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java
@@ -44,7 +44,6 @@
import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
import org.opensearch.action.admin.indices.flush.FlushRequest;
import org.opensearch.action.admin.indices.flush.FlushResponse;
-import org.opensearch.action.admin.indices.flush.SyncedFlushRequest;
import org.opensearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.opensearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.opensearch.action.admin.indices.open.OpenIndexRequest;
@@ -69,7 +68,6 @@
import org.opensearch.client.GetAliasesResponse;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestHighLevelClient;
-import org.opensearch.client.SyncedFlushResponse;
import org.opensearch.client.indices.AnalyzeRequest;
import org.opensearch.client.indices.AnalyzeResponse;
import org.opensearch.client.indices.CloseIndexRequest;
@@ -1012,94 +1010,6 @@ public void onFailure(Exception e) {
}
}
- @SuppressWarnings("unused")
- public void testSyncedFlushIndex() throws Exception {
- RestHighLevelClient client = highLevelClient();
-
- {
- createIndex("index1", Settings.EMPTY);
- }
-
- {
- // tag::flush-synced-request
- SyncedFlushRequest request = new SyncedFlushRequest("index1"); // <1>
- SyncedFlushRequest requestMultiple = new SyncedFlushRequest("index1", "index2"); // <2>
- SyncedFlushRequest requestAll = new SyncedFlushRequest(); // <3>
- // end::flush-synced-request
-
- // tag::flush-synced-request-indicesOptions
- request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1>
- // end::flush-synced-request-indicesOptions
-
- // tag::flush-synced-execute
- SyncedFlushResponse flushSyncedResponse = client.indices().flushSynced(request, expectWarnings(
- "Synced flush is deprecated and will be removed in 8.0. Use flush at _/flush or /{index}/_flush instead."
- ));
- // end::flush-synced-execute
-
- // tag::flush-synced-response
- int totalShards = flushSyncedResponse.totalShards(); // <1>
- int successfulShards = flushSyncedResponse.successfulShards(); // <2>
- int failedShards = flushSyncedResponse.failedShards(); // <3>
-
- for (Map.Entry<String, SyncedFlushResponse.IndexResult> responsePerIndexEntry:
- flushSyncedResponse.getIndexResults().entrySet()) {
- String indexName = responsePerIndexEntry.getKey(); // <4>
- SyncedFlushResponse.IndexResult indexResult = responsePerIndexEntry.getValue();
- int totalShardsForIndex = indexResult.totalShards(); // <5>
- int successfulShardsForIndex = indexResult.successfulShards(); // <6>
- int failedShardsForIndex = indexResult.failedShards(); // <7>
- if (failedShardsForIndex > 0) {
- for (SyncedFlushResponse.ShardFailure failureEntry: indexResult.failures()) {
- int shardId = failureEntry.getShardId(); // <8>
- String failureReason = failureEntry.getFailureReason(); // <9>
- Map<String, Object> routing = failureEntry.getRouting(); // <10>
- }
- }
- }
- // end::flush-synced-response
-
- // tag::flush-synced-execute-listener
- ActionListener<SyncedFlushResponse> listener = new ActionListener<SyncedFlushResponse>() {
- @Override
- public void onResponse(SyncedFlushResponse refreshResponse) {
- // <1>
- }
-
- @Override
- public void onFailure(Exception e) {
- // <2>
- }
- };
- // end::flush-synced-execute-listener
-
- // Replace the empty listener by a blocking listener in test
- final CountDownLatch latch = new CountDownLatch(1);
- listener = new LatchedActionListener<>(listener, latch);
-
- // tag::flush-synced-execute-async
- client.indices().flushSyncedAsync(request, expectWarnings(
- "Synced flush is deprecated and will be removed in 8.0. Use flush at _/flush or /{index}/_flush instead."
- ), listener); // <1>
- // end::flush-synced-execute-async
-
- assertTrue(latch.await(30L, TimeUnit.SECONDS));
- }
-
- {
- // tag::flush-synced-notfound
- try {
- SyncedFlushRequest request = new SyncedFlushRequest("does_not_exist");
- client.indices().flushSynced(request, RequestOptions.DEFAULT);
- } catch (OpenSearchException exception) {
- if (exception.status() == RestStatus.NOT_FOUND) {
- // <1>
- }
- }
- // end::flush-synced-notfound
- }
- }
-
public void testGetSettings() throws Exception {
RestHighLevelClient client = highLevelClient();
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/QueryDSLDocumentationTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/QueryDSLDocumentationTests.java
index 47a116458cb96..9f5c2e51a7960 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/QueryDSLDocumentationTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/QueryDSLDocumentationTests.java
@@ -88,7 +88,6 @@
import static org.opensearch.index.query.QueryBuilders.spanWithinQuery;
import static org.opensearch.index.query.QueryBuilders.termQuery;
import static org.opensearch.index.query.QueryBuilders.termsQuery;
-import static org.opensearch.index.query.QueryBuilders.typeQuery;
import static org.opensearch.index.query.QueryBuilders.wildcardQuery;
import static org.opensearch.index.query.QueryBuilders.wrapperQuery;
import static org.opensearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction;
@@ -447,12 +446,6 @@ public void testTerms() {
// end::terms
}
- public void testType() {
- // tag::type
- typeQuery("my_type"); // <1>
- // end::type
- }
-
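The type query snippet is removed together with QueryBuilders.typeQuery. Where several kinds of documents share one index, a common substitute is a term query on an ordinary discriminator field; a hedged sketch in the same snippet style (the field name "type" and the value are assumptions, not part of the diff):

    import org.opensearch.index.query.QueryBuilder;
    import org.opensearch.index.query.QueryBuilders;

    // Sketch only: filter on a custom "type" field instead of the removed type query.
    QueryBuilder kindFilter = QueryBuilders.termQuery("type", "my_type");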
public void testWildcard() {
// tag::wildcard
wildcardQuery(
diff --git a/client/sniffer/licenses/jackson-core-2.12.5.jar.sha1 b/client/sniffer/licenses/jackson-core-2.12.5.jar.sha1
deleted file mode 100644
index ed27d8a96bf20..0000000000000
--- a/client/sniffer/licenses/jackson-core-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-725e364cc71b80e60fa450bd06d75cdea7fb2d59
\ No newline at end of file
diff --git a/client/sniffer/licenses/jackson-core-2.12.6.jar.sha1 b/client/sniffer/licenses/jackson-core-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..d62c70d6b0f11
--- /dev/null
+++ b/client/sniffer/licenses/jackson-core-2.12.6.jar.sha1
@@ -0,0 +1 @@
+5bf206c0b5982cfcd868b3d9349dc5190db8bab5
\ No newline at end of file
diff --git a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java
index 2a6362c611329..adddb3bda725c 100644
--- a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java
+++ b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java
@@ -160,9 +160,8 @@ public void run() {
// tasks are run by a single threaded executor, so swapping is safe with a simple volatile variable
ScheduledTask previousTask = nextScheduledTask;
nextScheduledTask = new ScheduledTask(task, future);
- assert initialized.get() == false
- || previousTask.task.isSkipped()
- || previousTask.task.hasStarted() : "task that we are replacing is neither " + "cancelled nor has it ever started";
+ assert initialized.get() == false || previousTask.task.isSkipped() || previousTask.task.hasStarted()
+ : "task that we are replacing is neither " + "cancelled nor has it ever started";
}
}
diff --git a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/RequestsWithoutContentIT.java b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/RequestsWithoutContentIT.java
index 474f7052b8450..92c35ccf316c7 100644
--- a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/RequestsWithoutContentIT.java
+++ b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/RequestsWithoutContentIT.java
@@ -43,7 +43,7 @@ public class RequestsWithoutContentIT extends OpenSearchRestTestCase {
public void testIndexMissingBody() throws IOException {
ResponseException responseException = expectThrows(ResponseException.class, () ->
- client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/idx/type/123")));
+ client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/idx/_doc/123")));
assertResponseException(responseException, "request body is required");
}
diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile
index 94f3f03e5fa4a..c9be5c632cb59 100644
--- a/distribution/docker/src/docker/Dockerfile
+++ b/distribution/docker/src/docker/Dockerfile
@@ -63,7 +63,9 @@ FROM ${base_image}
ENV OPENSEARCH_CONTAINER true
-RUN for iter in {1..10}; do \\
+RUN sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-Linux-* && \\
+ sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-Linux-* && \\
+ for iter in {1..10}; do \\
${package_manager} update --setopt=tsflags=nodocs -y && \\
${package_manager} install --setopt=tsflags=nodocs -y \\
nc shadow-utils zip unzip && \\
diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index e5c75af5188e5..93a82ff324835 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -63,7 +63,7 @@ import java.util.regex.Pattern
*/
plugins {
- id "nebula.ospackage-base" version "9.0.0"
+ id "nebula.ospackage-base" version "9.1.1"
}
void addProcessFilesTask(String type, boolean jdk) {
diff --git a/distribution/packages/src/common/env/opensearch b/distribution/packages/src/common/env/opensearch
index 52a2639abbc46..198bcfde90c4c 100644
--- a/distribution/packages/src/common/env/opensearch
+++ b/distribution/packages/src/common/env/opensearch
@@ -6,7 +6,7 @@
#OPENSEARCH_HOME=/usr/share/opensearch
# OpenSearch Java path
-#JAVA_HOME=
+#OPENSEARCH_JAVA_HOME=
# OpenSearch configuration directory
# Note: this setting will be shared with command-line tools
diff --git a/distribution/packages/src/deb/init.d/opensearch b/distribution/packages/src/deb/init.d/opensearch
index cc95b465c88d6..e5195d2d54dba 100755
--- a/distribution/packages/src/deb/init.d/opensearch
+++ b/distribution/packages/src/deb/init.d/opensearch
@@ -66,8 +66,9 @@ DAEMON=$OPENSEARCH_HOME/bin/opensearch
DAEMON_OPTS="-d -p $PID_FILE"
export OPENSEARCH_JAVA_OPTS
-export JAVA_HOME
export OPENSEARCH_PATH_CONF
+export JAVA_HOME
+export OPENSEARCH_JAVA_HOME
if [ ! -x "$DAEMON" ]; then
echo "The opensearch startup script does not exists or it is not executable, tried: $DAEMON"
diff --git a/distribution/packages/src/rpm/init.d/opensearch b/distribution/packages/src/rpm/init.d/opensearch
index d31fca9d2d1a7..12a1470e75acb 100644
--- a/distribution/packages/src/rpm/init.d/opensearch
+++ b/distribution/packages/src/rpm/init.d/opensearch
@@ -53,6 +53,7 @@ export OPENSEARCH_JAVA_OPTS
export JAVA_HOME
export OPENSEARCH_PATH_CONF
export OPENSEARCH_STARTUP_SLEEP_TIME
+export OPENSEARCH_JAVA_HOME
lockfile=/var/lock/subsys/$prog
diff --git a/distribution/src/bin/opensearch-env b/distribution/src/bin/opensearch-env
index 99bded2ad0e52..6fe703a73b2de 100644
--- a/distribution/src/bin/opensearch-env
+++ b/distribution/src/bin/opensearch-env
@@ -44,8 +44,11 @@ OPENSEARCH_HOME=`dirname "$OPENSEARCH_HOME"`
# now set the classpath
OPENSEARCH_CLASSPATH="$OPENSEARCH_HOME/lib/*"
-# now set the path to java
-if [ ! -z "$JAVA_HOME" ]; then
+# now set the path to java: OPENSEARCH_JAVA_HOME -> JAVA_HOME -> bundled JDK
+if [ ! -z "$OPENSEARCH_JAVA_HOME" ]; then
+ JAVA="$OPENSEARCH_JAVA_HOME/bin/java"
+ JAVA_TYPE="OPENSEARCH_JAVA_HOME"
+elif [ ! -z "$JAVA_HOME" ]; then
JAVA="$JAVA_HOME/bin/java"
JAVA_TYPE="JAVA_HOME"
else
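
For readers following the startup-script change above, the lookup order it introduces (OPENSEARCH_JAVA_HOME, then JAVA_HOME, then the bundled JDK) can be summarized in a short Java sketch; the class and method names below are illustrative only and are not part of the distribution.

import java.nio.file.Path;
import java.nio.file.Paths;

// A minimal sketch, assuming the same precedence as opensearch-env:
// OPENSEARCH_JAVA_HOME, then JAVA_HOME, then the JDK bundled under $OPENSEARCH_HOME/jdk.
final class JavaHomeResolutionSketch {
    static Path resolveJavaBinary(Path opensearchHome) {
        String opensearchJavaHome = System.getenv("OPENSEARCH_JAVA_HOME");
        if (opensearchJavaHome != null && opensearchJavaHome.isEmpty() == false) {
            return Paths.get(opensearchJavaHome, "bin", "java");
        }
        String javaHome = System.getenv("JAVA_HOME");
        if (javaHome != null && javaHome.isEmpty() == false) {
            return Paths.get(javaHome, "bin", "java");
        }
        return opensearchHome.resolve("jdk").resolve("bin").resolve("java"); // bundled JDK fallback
    }
}
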
diff --git a/distribution/src/bin/opensearch-env.bat b/distribution/src/bin/opensearch-env.bat
index 244aa4452d581..bc8a6ce53a5f5 100644
--- a/distribution/src/bin/opensearch-env.bat
+++ b/distribution/src/bin/opensearch-env.bat
@@ -39,16 +39,19 @@ if "%1" == "nojava" (
exit /b
)
-rem compariing to empty string makes this equivalent to bash -v check on env var
+rem comparing to empty string makes this equivalent to bash -v check on env var
rem and allows to effectively force use of the bundled jdk when launching OpenSearch
-rem by setting JAVA_HOME=
-if "%JAVA_HOME%" == "" (
+rem by setting OPENSEARCH_JAVA_HOME= and JAVA_HOME=
+if not "%OPENSEARCH_JAVA_HOME%" == "" (
+ set JAVA="%OPENSEARCH_JAVA_HOME%\bin\java.exe"
+ set JAVA_TYPE=OPENSEARCH_JAVA_HOME
+) else if not "%JAVA_HOME%" == "" (
+ set JAVA="%JAVA_HOME%\bin\java.exe"
+ set JAVA_TYPE=JAVA_HOME
+) else (
set JAVA="%OPENSEARCH_HOME%\jdk\bin\java.exe"
set JAVA_HOME="%OPENSEARCH_HOME%\jdk"
set JAVA_TYPE=bundled jdk
-) else (
- set JAVA="%JAVA_HOME%\bin\java.exe"
- set JAVA_TYPE=JAVA_HOME
)
if not exist !JAVA! (
diff --git a/distribution/tools/keystore-cli/build.gradle b/distribution/tools/keystore-cli/build.gradle
index 670c898019d28..05dddbed501af 100644
--- a/distribution/tools/keystore-cli/build.gradle
+++ b/distribution/tools/keystore-cli/build.gradle
@@ -35,5 +35,5 @@ dependencies {
compileOnly project(":libs:opensearch-cli")
testImplementation project(":test:framework")
testImplementation 'com.google.jimfs:jimfs:1.1'
- testRuntimeOnly 'com.google.guava:guava:30.1.1-jre'
+ testRuntimeOnly 'com.google.guava:guava:31.0.1-jre'
}
diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle
index d96fced1ec293..2f3ede7194a6d 100644
--- a/distribution/tools/plugin-cli/build.gradle
+++ b/distribution/tools/plugin-cli/build.gradle
@@ -38,8 +38,8 @@ dependencies {
api "org.bouncycastle:bcpg-fips:1.0.5.1"
api "org.bouncycastle:bc-fips:1.0.2.1"
testImplementation project(":test:framework")
- testImplementation 'com.google.jimfs:jimfs:1.1'
- testRuntimeOnly 'com.google.guava:guava:30.1.1-jre'
+ testImplementation 'com.google.jimfs:jimfs:1.2'
+ testRuntimeOnly 'com.google.guava:guava:31.0.1-jre'
}
tasks.named("dependencyLicenses").configure {
diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java
index b404614ca435b..8acf137043a92 100644
--- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java
+++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java
@@ -218,11 +218,23 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
Arrays.asList("b", "batch"),
"Enable batch mode explicitly, automatic confirmation of security permission"
);
- this.arguments = parser.nonOptions("plugin id");
+ this.arguments = parser.nonOptions("plugin ");
}
@Override
protected void printAdditionalHelp(Terminal terminal) {
+ terminal.println("Plugins are packaged as zip files. Each packaged plugin must contain a plugin properties file.");
+ terminal.println("");
+
+ // List possible plugin id inputs
+ terminal.println("The install command takes a plugin id, which may be any of the following:");
+ terminal.println(" An official opensearch plugin name");
+ terminal.println(" Maven coordinates to a plugin zip");
+ terminal.println(" A URL to a plugin zip");
+ terminal.println(" A local zip file");
+ terminal.println("");
+
+ // List official opensearch plugin names
terminal.println("The following official plugins may be installed by name:");
for (String plugin : OFFICIAL_PLUGINS) {
terminal.println(" " + plugin);
@@ -401,7 +413,7 @@ private String getMavenUrl(Terminal terminal, String[] coordinates, String platf
boolean urlExists(Terminal terminal, String urlString) throws IOException {
terminal.println(VERBOSE, "Checking if url exists: " + urlString);
URL url = new URL(urlString);
- assert "https".equals(url.getProtocol()) : "Only http urls can be checked";
+ assert "https".equals(url.getProtocol()) : "Use of https protocol is required";
HttpURLConnection urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.addRequestProperty("User-Agent", "opensearch-plugin-installer");
urlConnection.setRequestMethod("HEAD");
diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
index a57050540a216..e0e5cbc54276e 100644
--- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
+++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
@@ -828,6 +828,31 @@ protected boolean addShutdownHook() {
}
}
+ public void testPluginsHelpNonOptionArgumentsOutput() throws Exception {
+ MockTerminal terminal = new MockTerminal();
+ new InstallPluginCommand() {
+ @Override
+ protected boolean addShutdownHook() {
+ return false;
+ }
+ }.main(new String[] { "--help" }, terminal);
+ try (BufferedReader reader = new BufferedReader(new StringReader(terminal.getOutput()))) {
+
+ // grab first line of --help output
+ String line = reader.readLine();
+
+ // find the beginning of Non-option arguments list
+ while (line.contains("Non-option arguments:") == false) {
+ line = reader.readLine();
+ }
+
+ // check that the non-option argument list contains the correct string
+ line = reader.readLine();
+ assertThat(line, containsString(""));
+
+ }
+ }
+
public void testInstallMisspelledOfficialPlugins() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
diff --git a/distribution/tools/upgrade-cli/build.gradle b/distribution/tools/upgrade-cli/build.gradle
index 5018a4bb8702e..29d06b89395c6 100644
--- a/distribution/tools/upgrade-cli/build.gradle
+++ b/distribution/tools/upgrade-cli/build.gradle
@@ -19,7 +19,7 @@ dependencies {
implementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
testImplementation project(":test:framework")
testImplementation 'com.google.jimfs:jimfs:1.2'
- testRuntimeOnly 'com.google.guava:guava:30.1.1-jre'
+ testRuntimeOnly 'com.google.guava:guava:31.0.1-jre'
}
tasks.named("dependencyLicenses").configure {
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.12.5.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.12.5.jar.sha1
deleted file mode 100644
index 797bcf2b161d4..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-52d929d5bb21d0186fe24c09624cc3ee4bafc3b3
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.12.6.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..48ee3bf53c630
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.12.6.jar.sha1
@@ -0,0 +1 @@
+9487231edd6b0b1f14692c9cba9e0462809215d1
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.12.5.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.12.5.jar.sha1
deleted file mode 100644
index ca1bd46bc3cd3..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b064cf057f23d3d35390328c5030847efeffedde
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.12.6.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..f74842887d31b
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.12.6.jar.sha1
@@ -0,0 +1 @@
+fac216b606c1086e36acea6e572ee61572ad1670
\ No newline at end of file
diff --git a/gradle.properties b/gradle.properties
index 0974eb6d6b7da..53b593923ce26 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -11,7 +11,12 @@
org.gradle.warning.mode=none
org.gradle.parallel=true
-org.gradle.jvmargs=-Xmx3g -XX:+HeapDumpOnOutOfMemoryError -Xss2m
+org.gradle.jvmargs=-Xmx3g -XX:+HeapDumpOnOutOfMemoryError -Xss2m \
+ --add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED \
+ --add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED \
+ --add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED \
+ --add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED \
+ --add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED
options.forkOptions.memoryMaximumSize=2g
# Disable duplicate project id detection
diff --git a/libs/cli/build.gradle b/libs/cli/build.gradle
index 9abaf35e589be..7f1e9cb8d04b3 100644
--- a/libs/cli/build.gradle
+++ b/libs/cli/build.gradle
@@ -32,7 +32,7 @@ apply plugin: 'nebula.optional-base'
apply plugin: 'opensearch.publish'
dependencies {
- api 'net.sf.jopt-simple:jopt-simple:5.0.2'
+ api 'net.sf.jopt-simple:jopt-simple:5.0.4'
api project(':libs:opensearch-core')
}
diff --git a/libs/cli/licenses/jopt-simple-5.0.2.jar.sha1 b/libs/cli/licenses/jopt-simple-5.0.2.jar.sha1
deleted file mode 100644
index b50ed4fea3bd1..0000000000000
--- a/libs/cli/licenses/jopt-simple-5.0.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-98cafc6081d5632b61be2c9e60650b64ddbc637c
\ No newline at end of file
diff --git a/libs/cli/licenses/jopt-simple-5.0.4.jar.sha1 b/libs/cli/licenses/jopt-simple-5.0.4.jar.sha1
new file mode 100644
index 0000000000000..7ade81efe4d0d
--- /dev/null
+++ b/libs/cli/licenses/jopt-simple-5.0.4.jar.sha1
@@ -0,0 +1 @@
+4fdac2fbe92dfad86aa6e9301736f6b4342a3f5c
\ No newline at end of file
diff --git a/libs/grok/build.gradle b/libs/grok/build.gradle
index b324bba381a26..ce23406721fe6 100644
--- a/libs/grok/build.gradle
+++ b/libs/grok/build.gradle
@@ -29,7 +29,7 @@
*/
dependencies {
- api 'org.jruby.joni:joni:2.1.29'
+ api 'org.jruby.joni:joni:2.1.41'
// joni dependencies:
api 'org.jruby.jcodings:jcodings:1.0.44'
@@ -41,3 +41,7 @@ dependencies {
tasks.named('forbiddenApisMain').configure {
replaceSignatureFiles 'jdk-signatures'
}
+
+thirdPartyAudit.ignoreMissingClasses(
+ 'org.jcodings.unicode.UnicodeCodeRange'
+)
\ No newline at end of file
diff --git a/libs/grok/licenses/joni-2.1.29.jar.sha1 b/libs/grok/licenses/joni-2.1.29.jar.sha1
deleted file mode 100644
index 251ff2ec05a19..0000000000000
--- a/libs/grok/licenses/joni-2.1.29.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3cb751702e1194ff24259155db4d37e9383d4320
\ No newline at end of file
diff --git a/libs/grok/licenses/joni-2.1.41.jar.sha1 b/libs/grok/licenses/joni-2.1.41.jar.sha1
new file mode 100644
index 0000000000000..4f0a0a8393dd0
--- /dev/null
+++ b/libs/grok/licenses/joni-2.1.41.jar.sha1
@@ -0,0 +1 @@
+4a35f4eaef792073bc081b756b1f4949879cd41e
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-core-2.12.5.jar.sha1 b/libs/x-content/licenses/jackson-core-2.12.5.jar.sha1
deleted file mode 100644
index ed27d8a96bf20..0000000000000
--- a/libs/x-content/licenses/jackson-core-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-725e364cc71b80e60fa450bd06d75cdea7fb2d59
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-core-2.12.6.jar.sha1 b/libs/x-content/licenses/jackson-core-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..d62c70d6b0f11
--- /dev/null
+++ b/libs/x-content/licenses/jackson-core-2.12.6.jar.sha1
@@ -0,0 +1 @@
+5bf206c0b5982cfcd868b3d9349dc5190db8bab5
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.12.5.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.12.5.jar.sha1
deleted file mode 100644
index f992f732a7f91..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-cbor-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2b6f24ee5ac7cde7f5a4e574bd0af4a72ecb55f6
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.12.6.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..9fdb5a5012b8d
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.12.6.jar.sha1
@@ -0,0 +1 @@
+3cd2e6a538f73483c6c59c354ce2276bcdc5ba7b
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.12.5.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.12.5.jar.sha1
deleted file mode 100644
index 9e2c3222014c2..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-smile-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4233326c74c6601fbbeea11c103c011859cc687d
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.12.6.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..a8eb043684fac
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-smile-2.12.6.jar.sha1
@@ -0,0 +1 @@
+bc9b6bcf12a14382424324ee067456ee354a0dfb
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.12.5.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.12.5.jar.sha1
deleted file mode 100644
index 6eba2dad4947f..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-yaml-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-89ec27e5f422b0749b1133137c1b36debbb5f3bc
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.12.6.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..e2ed10942b3b6
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.12.6.jar.sha1
@@ -0,0 +1 @@
+cfba448bc4e92b8656968756a9c4af1ad8e502e4
\ No newline at end of file
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ArabicAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ArabicAnalyzerProvider.java
index 397c1b2c922b0..40239cbf38567 100644
--- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ArabicAnalyzerProvider.java
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ArabicAnalyzerProvider.java
@@ -50,7 +50,6 @@ public class ArabicAnalyzerProvider extends AbstractIndexAnalyzerProvider<ArabicAnalyzer> {
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java
--- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java
@@ ... @@ Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
filters.put("classic", ClassicFilterFactory::new);
filters.put("czech_stem", CzechStemTokenFilterFactory::new);
filters.put("common_grams", requiresAnalysisSettings(CommonGramsTokenFilterFactory::new));
+ filters.put("concatenate_graph", ConcatenateGraphTokenFilterFactory::new);
filters.put(
"condition",
requiresAnalysisSettings((i, e, n, s) -> new ScriptedConditionTokenFilterFactory(i, n, s, scriptService.get()))
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactory.java
new file mode 100644
index 0000000000000..0d1a2b185d1d3
--- /dev/null
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactory.java
@@ -0,0 +1,81 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.analysis.common;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.miscellaneous.ConcatenateGraphFilter;
+import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
+import org.opensearch.LegacyESVersion;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.env.Environment;
+import org.opensearch.index.IndexSettings;
+import org.opensearch.index.analysis.AbstractTokenFilterFactory;
+
+/**
+ * Factory for {@link ConcatenateGraphFilter}.
+ * Adopted from {@link org.apache.lucene.analysis.miscellaneous.ConcatenateGraphFilterFactory}, with some changes to
+ * default values: token_separator is a "space", preserve_position_increments is false to avoid duplicated separators,
+ * max_graph_expansions is 100 as the default value of 10_000 seems to be unnecessarily large and preserve_separator is false.
+ *
+ *
+ * preserve_separator:
+ * For LegacyESVersion lower than {@link LegacyESVersion#V_7_6_0}, i.e. Lucene versions lower
+ * than {@link org.apache.lucene.util.Version#LUCENE_8_4_0}
+ * Whether {@link ConcatenateGraphFilter#SEP_LABEL} should separate the input tokens in the concatenated token.
+ *
+ * token_separator:
+ * Separator to use for concatenation. Must be a String with a single character or empty.
+ * If not present, {@link ConcatenateGraphTokenFilterFactory#DEFAULT_TOKEN_SEPARATOR} will be used.
+ * If empty i.e. "", tokens will be concatenated without any separators.
+ *
+ * preserve_position_increments:
+ * Whether to add an empty token for missing positions.
+ * If not present, {@link ConcatenateGraphTokenFilterFactory#DEFAULT_PRESERVE_POSITION_INCREMENTS} will be used.
+ *
+ * max_graph_expansions:
+ * If the tokenStream graph has more than this many possible paths through, then we'll throw
+ * {@link TooComplexToDeterminizeException} to preserve the stability and memory of the
+ * machine.
+ * If not present, {@link ConcatenateGraphTokenFilterFactory#DEFAULT_MAX_GRAPH_EXPANSIONS} will be used.
+ *
+ *
+ * @see ConcatenateGraphFilter
+ */
+public class ConcatenateGraphTokenFilterFactory extends AbstractTokenFilterFactory {
+ public static final String DEFAULT_TOKEN_SEPARATOR = " ";
+ public static final int DEFAULT_MAX_GRAPH_EXPANSIONS = 100;
+ public static final boolean DEFAULT_PRESERVE_POSITION_INCREMENTS = false;
+
+ private final Character tokenSeparator;
+ private final int maxGraphExpansions;
+ private final boolean preservePositionIncrements;
+
+ ConcatenateGraphTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
+ super(indexSettings, name, settings);
+
+ if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) { // i.e. Lucene 8.4.0
+ String separator = settings.get("token_separator", DEFAULT_TOKEN_SEPARATOR);
+ if (separator.length() > 1) {
+ throw new IllegalArgumentException("token_separator must be either empty or a single character");
+ }
+ tokenSeparator = separator.length() == 0 ? null : separator.charAt(0); // null means no separator while concatenating
+ } else {
+ boolean preserveSep = settings.getAsBoolean("preserve_separator", ConcatenateGraphFilter.DEFAULT_PRESERVE_SEP);
+ tokenSeparator = preserveSep ? ConcatenateGraphFilter.DEFAULT_TOKEN_SEPARATOR : null;
+ }
+
+ maxGraphExpansions = settings.getAsInt("max_graph_expansions", DEFAULT_MAX_GRAPH_EXPANSIONS);
+ preservePositionIncrements = settings.getAsBoolean("preserve_position_increments", DEFAULT_PRESERVE_POSITION_INCREMENTS);
+ }
+
+ @Override
+ public TokenStream create(TokenStream tokenStream) {
+ return new ConcatenateGraphFilter(tokenStream, tokenSeparator, preservePositionIncrements, maxGraphExpansions);
+ }
+}
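
As a usage illustration of the settings documented in the factory's javadoc above, the filter could be wired into a custom analyzer roughly as sketched below; the filter and analyzer names are placeholders, and the values shown are examples rather than defaults (this mirrors the settings style used by the module's tests later in this patch).

import org.opensearch.common.settings.Settings;

// A minimal sketch of index settings using the concatenate_graph filter described above;
// "my_concatenate_graph" and "my_analyzer" are placeholder names.
public class ConcatenateGraphSettingsSketch {
    public static Settings exampleIndexSettings() {
        return Settings.builder()
            .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
            .put("index.analysis.filter.my_concatenate_graph.token_separator", "-")      // empty string or a single character
            .put("index.analysis.filter.my_concatenate_graph.max_graph_expansions", 50)  // factory default is 100
            .put("index.analysis.analyzer.my_analyzer.type", "custom")
            .put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace")
            .put("index.analysis.analyzer.my_analyzer.filter", "my_concatenate_graph")
            .build();
    }
}
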
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CzechAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CzechAnalyzerProvider.java
index b4d3820767c7b..f6bc6b3f7ea02 100644
--- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CzechAnalyzerProvider.java
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CzechAnalyzerProvider.java
@@ -50,7 +50,6 @@ public class CzechAnalyzerProvider extends AbstractIndexAnalyzerProvider<CzechAnalyzer> {
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java
new file mode 100644
+ // Tokens produced by word_delimiter_graph: "Power" --> "Shot" --> "Is" --> "Awe" --> "Some"
+ // Expected output from word_delimiter_graph is a graph:
+ // ---> "Power" --> "Shot" ---> "Is" ---> "Awe" ---> "Some" ---
+ // | | | |
+ // --> "PowerShot" -------- --> "AweSome" ---------
+ // and this filter will traverse through all possible paths to produce concatenated tokens
+ String[] expected = new String[] {
+ "Power Shot Is Awe Some",
+ "Power Shot Is AweSome",
+ "PowerShot Is Awe Some",
+ "PowerShot Is AweSome" };
+
+ // all tokens will be in the same position
+ int[] expectedPosIncrements = new int[] { 1, 0, 0, 0 };
+ int[] expectedPosLengths = new int[] { 1, 1, 1, 1 };
+
+ NamedAnalyzer analyzer = analysis.indexAnalyzers.get("my_analyzer");
+ assertAnalyzesToPositions(analyzer, source, expected, expectedPosIncrements, expectedPosLengths);
+ }
+
+ public void testInvalidSeparator() {
+ expectThrows(
+ IllegalArgumentException.class,
+ () -> AnalysisTestsHelper.createTestAnalysisFromSettings(
+ Settings.builder()
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
+ .put("index.analysis.filter.my_concatenate_graph.token_separator", "11")
+ .build(),
+ new CommonAnalysisPlugin()
+ )
+ );
+ }
+
+ /**
+ * Similar to the {@link #testGraph()} case, there will be 4 paths generated by word_delimiter_graph.
+ * By setting max_graph_expansions to 3, we expect an exception.
+ */
+ public void testMaxGraphExpansion() throws IOException {
+ OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
+ Settings.builder()
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter_graph")
+ .put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
+ .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
+ .put("index.analysis.filter.my_concatenate_graph.max_graph_expansions", "3")
+ .put("index.analysis.analyzer.my_analyzer.type", "custom")
+ .put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace")
+ .put("index.analysis.analyzer.my_analyzer.filter", "my_word_delimiter, my_concatenate_graph")
+ .build(),
+ new CommonAnalysisPlugin()
+ );
+
+ String source = "PowerShot Is AweSome";
+
+ TokenStream tokenStream = analysis.indexAnalyzers.get("my_analyzer").tokenStream("dummy", source);
+
+ tokenStream.reset();
+
+ expectThrows(TooComplexToDeterminizeException.class, tokenStream::incrementToken);
+ }
+}
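
The tests above read the concatenated paths back through the analyzer's token stream. A minimal standalone sketch of that consumption pattern, using standard Lucene attributes, is shown below; the field name is arbitrary and the analyzer is assumed to be one like the "my_analyzer" built in the tests.

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

// A minimal sketch: collect every token emitted by the analyzer, e.g. the
// concatenated paths "PowerShot Is AweSome", "Power Shot Is Awe Some", ...
public class ConcatenateGraphConsumptionSketch {
    static List<String> tokensOf(Analyzer analyzer, String text) throws IOException {
        List<String> tokens = new ArrayList<>();
        try (TokenStream stream = analyzer.tokenStream("dummy", text)) {
            CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
            stream.reset();
            while (stream.incrementToken()) {
                tokens.add(term.toString());
            }
            stream.end();
        }
        return tokens;
    }
}
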
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
index bc7dd3b110287..08e6aa4aa4c1d 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
@@ -140,7 +140,6 @@ public void testNgramHighlightingWithBrokenPositions() throws IOException {
client().prepareIndex("test", "test", "1").setSource("name", "ARCOTEL Hotels Deutschland").get();
refresh();
SearchResponse search = client().prepareSearch("test")
- .setTypes("test")
.setQuery(matchQuery("name.autocomplete", "deut tel").operator(Operator.OR))
.highlighter(new HighlightBuilder().field("name.autocomplete"))
.get();
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml
index 3bca0e1b950bb..56ed2175df60a 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml
@@ -24,9 +24,6 @@
---
"ngram_exception":
- - skip:
- version: " - 6.99.99"
- reason: only starting from version 7.x this throws an error
- do:
catch: /The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to[:] \[1\] but was \[2\]\. This limit can be set by changing the \[index.max_ngram_diff\] index level setting\./
indices.analyze:
diff --git a/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java b/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java
index babf024da019b..6efd7cbcd9c41 100644
--- a/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java
+++ b/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java
@@ -209,7 +209,7 @@ public Settings onNodeStopped(String nodeName) {
)
);
- Map<String, Object> source = client().prepareGet("index", "doc", "1").get().getSource();
+ Map<String, Object> source = client().prepareGet("index", "1").get().getSource();
assertThat(source.get("x"), equalTo(0));
assertThat(source.get("y"), equalTo(0));
}
@@ -242,7 +242,7 @@ public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exceptio
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.get();
- Map<String, Object> source = client().prepareGet("index", "doc", "1").get().getSource();
+ Map<String, Object> source = client().prepareGet("index", "1").get().getSource();
assertThat(source.get("x"), equalTo(0));
assertThat(source.get("y"), equalTo(0));
assertThat(source.get("z"), equalTo(0));
@@ -260,7 +260,7 @@ public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exceptio
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.get();
- source = client().prepareGet("index", "doc", "2").get().getSource();
+ source = client().prepareGet("index", "2").get().getSource();
assertThat(source.get("x"), equalTo(0));
assertThat(source.get("y"), equalTo(0));
assertThat(source.get("z"), equalTo(0));
@@ -281,7 +281,7 @@ public void testWithDedicatedIngestNode() throws Exception {
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.get();
- Map<String, Object> source = client().prepareGet("index", "doc", "1").get().getSource();
+ Map<String, Object> source = client().prepareGet("index", "1").get().getSource();
assertThat(source.get("x"), equalTo(0));
assertThat(source.get("y"), equalTo(0));
@@ -294,7 +294,7 @@ public void testWithDedicatedIngestNode() throws Exception {
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.get();
- source = client(ingestNode).prepareGet("index", "doc", "2").get().getSource();
+ source = client(ingestNode).prepareGet("index", "2").get().getSource();
assertThat(source.get("x"), equalTo(0));
assertThat(source.get("y"), equalTo(0));
}
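
The hunks above migrate the ingest tests to the typeless request style (no "doc"/"_doc" type argument). A minimal sketch of that style follows; the index name, document id, and field are placeholders.

import org.opensearch.action.get.GetResponse;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.client.Client;

import java.util.Collections;
import java.util.Map;

// A minimal sketch of typeless indexing and retrieval; "index", "1" and "field" are placeholders.
public class TypelessRequestsSketch {
    static Map<String, Object> indexAndFetch(Client client) {
        IndexRequest indexRequest = new IndexRequest("index").id("1")
            .source(Collections.singletonMap("field", "value"));
        client.index(indexRequest).actionGet();
        GetResponse response = client.prepareGet("index", "1").get();
        return response.getSource();
    }
}
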
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java
index 820ef3a8ee9c2..1ff2aa7fdd629 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java
@@ -60,7 +60,6 @@ public void testJavaPattern() throws Exception {
);
IngestDocument document = new IngestDocument(
"_index",
- "_type",
"_id",
null,
null,
@@ -83,7 +82,6 @@ public void testTAI64N() throws Exception {
);
IngestDocument document = new IngestDocument(
"_index",
- "_type",
"_id",
null,
null,
@@ -104,19 +102,11 @@ public void testUnixMs() throws Exception {
"m",
"yyyyMMdd"
);
- IngestDocument document = new IngestDocument(
- "_index",
- "_type",
- "_id",
- null,
- null,
- null,
- Collections.singletonMap("_field", "1000500")
- );
+ IngestDocument document = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("_field", "1000500"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo(""));
- document = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.singletonMap("_field", 1000500L));
+ document = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("_field", 1000500L));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo(""));
}
@@ -131,15 +121,7 @@ public void testUnix() throws Exception {
"m",
"yyyyMMdd"
);
- IngestDocument document = new IngestDocument(
- "_index",
- "_type",
- "_id",
- null,
- null,
- null,
- Collections.singletonMap("_field", "1000.5")
- );
+ IngestDocument document = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("_field", "1000.5"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo(""));
}
@@ -160,7 +142,7 @@ public void testTemplatedFields() throws Exception {
indexNameFormat
);
- IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.singletonMap("_field", date));
+ IngestDocument document = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("_field", date));
dateProcessor.execute(document);
assertThat(
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java
index 6f44b81e7b43b..ca0c0df40f009 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java
@@ -55,7 +55,6 @@ public class DissectProcessorTests extends OpenSearchTestCase {
public void testMatch() {
IngestDocument ingestDocument = new IngestDocument(
"_index",
- "_type",
"_id",
null,
null,
@@ -72,7 +71,6 @@ public void testMatch() {
public void testMatchOverwrite() {
IngestDocument ingestDocument = new IngestDocument(
"_index",
- "_type",
"_id",
null,
null,
@@ -90,7 +88,6 @@ public void testMatchOverwrite() {
public void testAdvancedMatch() {
IngestDocument ingestDocument = new IngestDocument(
"_index",
- "_type",
"_id",
null,
null,
@@ -116,7 +113,6 @@ public void testAdvancedMatch() {
public void testMiss() {
IngestDocument ingestDocument = new IngestDocument(
"_index",
- "_type",
"_id",
null,
null,
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java
index f0c61700f4db0..8db3cefc3a6fd 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java
@@ -61,15 +61,7 @@ public void testExecuteWithAsyncProcessor() throws Exception {
values.add("foo");
values.add("bar");
values.add("baz");
- IngestDocument ingestDocument = new IngestDocument(
- "_index",
- "_type",
- "_id",
- null,
- null,
- null,
- Collections.singletonMap("values", values)
- );
+ IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("values", values));
ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", new AsyncUpperCaseProcessor("_ingest._value"), false);
processor.execute(ingestDocument, (result, e) -> {});
@@ -87,7 +79,6 @@ public void testExecuteWithAsyncProcessor() throws Exception {
public void testExecuteWithFailure() throws Exception {
IngestDocument ingestDocument = new IngestDocument(
"_index",
- "_type",
"_id",
null,
null,
@@ -132,15 +123,7 @@ public void testMetadataAvailable() throws Exception {
List<Map<String, Object>> values = new ArrayList<>();
values.add(new HashMap<>());
values.add(new HashMap<>());
- IngestDocument ingestDocument = new IngestDocument(
- "_index",
- "_type",
- "_id",
- null,
- null,
- null,
- Collections.singletonMap("values", values)
- );
+ IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("values", values));
TestProcessor innerProcessor = new TestProcessor(id -> {
id.setFieldValue("_ingest._value.index", id.getSourceAndMetadata().get("_index"));
@@ -152,10 +135,8 @@ public void testMetadataAvailable() throws Exception {
assertThat(innerProcessor.getInvokedCounter(), equalTo(2));
assertThat(ingestDocument.getFieldValue("values.0.index", String.class), equalTo("_index"));
- assertThat(ingestDocument.getFieldValue("values.0.type", String.class), equalTo("_type"));
assertThat(ingestDocument.getFieldValue("values.0.id", String.class), equalTo("_id"));
assertThat(ingestDocument.getFieldValue("values.1.index", String.class), equalTo("_index"));
- assertThat(ingestDocument.getFieldValue("values.1.type", String.class), equalTo("_type"));
assertThat(ingestDocument.getFieldValue("values.1.id", String.class), equalTo("_id"));
}
@@ -170,7 +151,7 @@ public void testRestOfTheDocumentIsAvailable() throws Exception {
document.put("values", values);
document.put("flat_values", new ArrayList<>());
document.put("other", "value");
- IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, document);
+ IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, document);
ForEachProcessor processor = new ForEachProcessor(
"_tag",
@@ -220,15 +201,7 @@ public String getDescription() {
int numValues = randomIntBetween(1, 10000);
List<String> values = IntStream.range(0, numValues).mapToObj(i -> "").collect(Collectors.toList());
- IngestDocument ingestDocument = new IngestDocument(
- "_index",
- "_type",
- "_id",
- null,
- null,
- null,
- Collections.singletonMap("values", values)
- );
+ IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("values", values));
ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", innerProcessor, false);
processor.execute(ingestDocument, (result, e) -> {});
@@ -244,15 +217,7 @@ public void testModifyFieldsOutsideArray() throws Exception {
values.add("string");
values.add(1);
values.add(null);
- IngestDocument ingestDocument = new IngestDocument(
- "_index",
- "_type",
- "_id",
- null,
- null,
- null,
- Collections.singletonMap("values", values)
- );
+ IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("values", values));
TemplateScript.Factory template = new TestTemplateService.MockTemplateScript.Factory("errors");
@@ -290,7 +255,7 @@ public void testScalarValueAllowsUnderscoreValueFieldToRemainAccessible() throws
Map<String, Object> source = new HashMap<>();
source.put("_value", "new_value");
source.put("values", values);
- IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, source);
+ IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, source);
TestProcessor processor = new TestProcessor(
doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_source._value", String.class))
@@ -320,15 +285,7 @@ public void testNestedForEach() throws Exception {
value.put("values2", innerValues);
values.add(value);
- IngestDocument ingestDocument = new IngestDocument(
- "_index",
- "_type",
- "_id",
- null,
- null,
- null,
- Collections.singletonMap("values1", values)
- );
+ IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("values1", values));
TestProcessor testProcessor = new TestProcessor(
doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_ingest._value", String.class).toUpperCase(Locale.ENGLISH))
@@ -352,7 +309,7 @@ public void testNestedForEach() throws Exception {
}
public void testIgnoreMissing() throws Exception {
- IngestDocument originalIngestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.emptyMap());
+ IngestDocument originalIngestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.emptyMap());
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
TestProcessor testProcessor = new TestProcessor(doc -> {});
ForEachProcessor processor = new ForEachProcessor("_tag", null, "_ingest._value", testProcessor, true);
@@ -363,7 +320,7 @@ public void testIgnoreMissing() throws Exception {
public void testAppendingToTheSameField() {
Map<String, Object> source = Collections.singletonMap("field", Arrays.asList("a", "b"));
- IngestDocument originalIngestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, source);
+ IngestDocument originalIngestDocument = new IngestDocument("_index", "_id", null, null, null, source);
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
TestProcessor testProcessor = new TestProcessor(id -> id.appendFieldValue("field", "a"));
ForEachProcessor processor = new ForEachProcessor("_tag", null, "field", testProcessor, true);
@@ -375,7 +332,7 @@ public void testAppendingToTheSameField() {
public void testRemovingFromTheSameField() {
Map<String, Object> source = Collections.singletonMap("field", Arrays.asList("a", "b"));
- IngestDocument originalIngestDocument = new IngestDocument("_index", "_id", "_type", null, null, null, source);
+ IngestDocument originalIngestDocument = new IngestDocument("_index", "_id", null, null, null, source);
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
TestProcessor testProcessor = new TestProcessor(id -> id.removeField("field.0"));
ForEachProcessor processor = new ForEachProcessor("_tag", null, "field", testProcessor, true);
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml
index 709d4b9e62d43..916a7fe656cc2 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml
@@ -62,7 +62,6 @@ teardown:
catch: '/Unable to find match for dissect pattern: \%\{a\},\%\{b\},\%\{c\} against source: foo bar baz/'
index:
index: test
- type: test
id: 2
pipeline: "my_pipeline"
body: {message: "foo bar baz"}
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml
index 7fbf182eac05f..9142317ce1507 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml
@@ -31,7 +31,6 @@ teardown:
- do:
index:
index: test
- type: test
id: 1
pipeline: "my_pipeline"
body: >
@@ -42,7 +41,6 @@ teardown:
- do:
get:
index: test
- type: test
id: 1
- match: { _source.values: ["FOO", "BAR", "BAZ"] }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
index 2224d56165fd3..e012a82b15927 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
@@ -32,7 +32,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"foo": "bar"
@@ -66,7 +65,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"foo": "bar"
@@ -97,7 +95,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"foo": "bar"
@@ -112,7 +109,7 @@ teardown:
- match: { error.root_cause.0.property_name: "field" }
---
-"Test simulate without index type and id":
+"Test simulate without id":
- do:
ingest.simulate:
body: >
@@ -166,7 +163,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"foo": "bar"
@@ -190,7 +186,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"foo": "bar"
@@ -223,7 +218,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"foo": "bar"
@@ -275,7 +269,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"foo": {
@@ -335,7 +328,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"not_foo": "bar"
@@ -343,7 +335,6 @@ teardown:
},
{
"_index": "index",
- "_type": "type",
"_id": "id2",
"_source": {
"foo": "bar"
@@ -383,7 +374,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"foo": "bar",
@@ -392,7 +382,6 @@ teardown:
},
{
"_index": "index",
- "_type": "type",
"_id": "id2",
"_source": {
"foo": "5",
@@ -525,7 +514,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"field1": "123.42 400 "
@@ -602,7 +590,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"field1": "123.42 400 "
@@ -655,7 +642,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"field1": "123.42 400 "
@@ -729,7 +715,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"field1": "123.42 400 "
@@ -804,7 +789,6 @@ teardown:
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"field1": "123.42 400 "
diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle
index 4eedf598c3f87..f78dc49e9fb8a 100644
--- a/modules/ingest-geoip/build.gradle
+++ b/modules/ingest-geoip/build.gradle
@@ -39,11 +39,11 @@ opensearchplugin {
}
dependencies {
- api('com.maxmind.geoip2:geoip2:2.13.1')
+ api('com.maxmind.geoip2:geoip2:2.16.1')
// geoip2 dependencies:
api("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}")
api("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}")
- api('com.maxmind.db:maxmind-db:1.3.1')
+ api('com.maxmind.db:maxmind-db:2.0.0')
testImplementation 'org.elasticsearch:geolite2-databases:20191119'
}
@@ -71,10 +71,8 @@ tasks.named("thirdPartyAudit").configure {
ignoreMissingClasses(
// geoip WebServiceClient needs apache http client, but we're not using WebServiceClient:
'org.apache.http.HttpEntity',
- 'org.apache.http.HttpHost',
'org.apache.http.HttpResponse',
'org.apache.http.StatusLine',
- 'org.apache.http.auth.UsernamePasswordCredentials',
'org.apache.http.client.config.RequestConfig$Builder',
'org.apache.http.client.config.RequestConfig',
'org.apache.http.client.methods.CloseableHttpResponse',
diff --git a/modules/ingest-geoip/licenses/geoip2-2.13.1.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-2.13.1.jar.sha1
deleted file mode 100644
index 253d9f12e7a3a..0000000000000
--- a/modules/ingest-geoip/licenses/geoip2-2.13.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f27d1a49d5a29dd4a7ac5006ce2eb16b8b9bb888
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..0221476794d3a
--- /dev/null
+++ b/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1
@@ -0,0 +1 @@
+c92040bd6ef2cb59be71c6749d08c141ca546caf
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.12.5.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.12.5.jar.sha1
deleted file mode 100644
index 797bcf2b161d4..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-annotations-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-52d929d5bb21d0186fe24c09624cc3ee4bafc3b3
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.12.6.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..48ee3bf53c630
--- /dev/null
+++ b/modules/ingest-geoip/licenses/jackson-annotations-2.12.6.jar.sha1
@@ -0,0 +1 @@
+9487231edd6b0b1f14692c9cba9e0462809215d1
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.12.5.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.12.5.jar.sha1
deleted file mode 100644
index ca1bd46bc3cd3..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-databind-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b064cf057f23d3d35390328c5030847efeffedde
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.12.6.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..f74842887d31b
--- /dev/null
+++ b/modules/ingest-geoip/licenses/jackson-databind-2.12.6.jar.sha1
@@ -0,0 +1 @@
+fac216b606c1086e36acea6e572ee61572ad1670
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/maxmind-db-1.3.1.jar.sha1 b/modules/ingest-geoip/licenses/maxmind-db-1.3.1.jar.sha1
deleted file mode 100644
index aebff2c3a849c..0000000000000
--- a/modules/ingest-geoip/licenses/maxmind-db-1.3.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-211bca628225bc0f719051b16deb03a747d7a14f
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/maxmind-db-2.0.0.jar.sha1 b/modules/ingest-geoip/licenses/maxmind-db-2.0.0.jar.sha1
new file mode 100644
index 0000000000000..32c18f89c6a29
--- /dev/null
+++ b/modules/ingest-geoip/licenses/maxmind-db-2.0.0.jar.sha1
@@ -0,0 +1 @@
+e7e0fd82da0a160b7928ba214e699a7e6a74fff4
\ No newline at end of file
diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java
index 2ef5d8da000b1..e88c77b8e33f4 100644
--- a/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java
+++ b/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java
@@ -167,7 +167,7 @@ public void testLazyLoading() throws IOException {
internalCluster().getInstance(IngestService.class, ingestNode);
// the geo-IP database should not be loaded yet as we have not indexed any documents using a pipeline that has a geo-IP processor
assertDatabaseLoadStatus(ingestNode, false);
- final IndexRequest indexRequest = new IndexRequest("index", "_doc");
+ final IndexRequest indexRequest = new IndexRequest("index");
indexRequest.setPipeline("geoip");
indexRequest.source(Collections.singletonMap("ip", "1.1.1.1"));
final IndexResponse indexResponse = client().index(indexRequest).actionGet();
diff --git a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java
index 15ca93e0fbae4..cda2f5692b0db 100644
--- a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java
+++ b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java
@@ -286,7 +286,7 @@ public void testLazyLoading() throws Exception {
}
final Map<String, Object> field = Collections.singletonMap("_field", "1.1.1.1");
- final IngestDocument document = new IngestDocument("index", "type", "id", "routing", 1L, VersionType.EXTERNAL, field);
+ final IngestDocument document = new IngestDocument("index", "id", "routing", 1L, VersionType.EXTERNAL, field);
Map<String, Object> config = new HashMap<>();
config.put("field", "_field");
@@ -343,7 +343,7 @@ public void testLoadingCustomDatabase() throws IOException {
}
final Map<String, Object> field = Collections.singletonMap("_field", "1.1.1.1");
- final IngestDocument document = new IngestDocument("index", "type", "id", "routing", 1L, VersionType.EXTERNAL, field);
+ final IngestDocument document = new IngestDocument("index", "id", "routing", 1L, VersionType.EXTERNAL, field);
Map<String, Object> config = new HashMap<>();
config.put("field", "_field");
diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle
index f7d5b7d039afc..dabbfde754f92 100644
--- a/modules/lang-expression/build.gradle
+++ b/modules/lang-expression/build.gradle
@@ -37,8 +37,8 @@ opensearchplugin {
dependencies {
api "org.apache.lucene:lucene-expressions:${versions.lucene}"
- api 'org.antlr:antlr4-runtime:4.5.1-1'
- api 'org.ow2.asm:asm:5.0.4'
+ api 'org.antlr:antlr4-runtime:4.9.3'
+ api 'org.ow2.asm:asm:9.2'
api 'org.ow2.asm:asm-commons:5.0.4'
api 'org.ow2.asm:asm-tree:5.0.4'
}
diff --git a/modules/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1 b/modules/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1
deleted file mode 100644
index f15e50069ba63..0000000000000
--- a/modules/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-66144204f9d6d7d3f3f775622c2dd7e9bd511d97
diff --git a/modules/lang-expression/licenses/antlr4-runtime-4.9.3.jar.sha1 b/modules/lang-expression/licenses/antlr4-runtime-4.9.3.jar.sha1
new file mode 100644
index 0000000000000..13a2367439ede
--- /dev/null
+++ b/modules/lang-expression/licenses/antlr4-runtime-4.9.3.jar.sha1
@@ -0,0 +1 @@
+81befc16ebedb8b8aea3e4c0835dd5ca7e8523a8
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/asm-5.0.4.jar.sha1 b/modules/lang-expression/licenses/asm-5.0.4.jar.sha1
deleted file mode 100644
index 9223dba380f8c..0000000000000
--- a/modules/lang-expression/licenses/asm-5.0.4.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0da08b8cce7bbf903602a25a3a163ae252435795
diff --git a/modules/lang-expression/licenses/asm-9.2.jar.sha1 b/modules/lang-expression/licenses/asm-9.2.jar.sha1
new file mode 100644
index 0000000000000..28f456d3cbcb2
--- /dev/null
+++ b/modules/lang-expression/licenses/asm-9.2.jar.sha1
@@ -0,0 +1 @@
+81a03f76019c67362299c40e0ba13405f5467bff
\ No newline at end of file
diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java
index 2a8236d5e0e4b..05064f66fef80 100644
--- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java
+++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java
@@ -85,7 +85,6 @@ public void testAllOpsDisabledIndexedScripts() throws IOException {
new SearchSourceBuilder().scriptField("test1", new Script(ScriptType.STORED, null, "script1", Collections.emptyMap()))
)
.setIndices("test")
- .setTypes("scriptTest")
.get();
fail("search script should have been rejected");
} catch (Exception e) {
diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java
index 0e0f21405818b..d0941cbc9452f 100644
--- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java
+++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java
@@ -77,7 +77,7 @@ public void setUp() throws Exception {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
- lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null);
+ lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
}
private FieldScript.LeafFactory compile(String expression) {
diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java
index 83b5c0930d1d0..f3559da59f992 100644
--- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java
+++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java
@@ -76,7 +76,7 @@ public void setUp() throws Exception {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
- lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null);
+ lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
}
private NumberSortScript.LeafFactory compile(String expression) {
diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java
index a71932ded1a7a..af7fc580f8a65 100644
--- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java
+++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java
@@ -76,7 +76,7 @@ public void setUp() throws Exception {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
- lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null);
+ lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
}
private TermsSetQueryScript.LeafFactory compile(String expression) {
diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle
index a26798bf90b91..511a6b144c21a 100644
--- a/modules/lang-mustache/build.gradle
+++ b/modules/lang-mustache/build.gradle
@@ -38,7 +38,7 @@ opensearchplugin {
}
dependencies {
- api "com.github.spullara.mustache.java:compiler:0.9.6"
+ api "com.github.spullara.mustache.java:compiler:0.9.10"
}
restResources {
diff --git a/modules/lang-mustache/licenses/compiler-0.9.10.jar.sha1 b/modules/lang-mustache/licenses/compiler-0.9.10.jar.sha1
new file mode 100644
index 0000000000000..6336318c2ce1a
--- /dev/null
+++ b/modules/lang-mustache/licenses/compiler-0.9.10.jar.sha1
@@ -0,0 +1 @@
+6111ae24e3be9ecbd75f5fe908583fc14b4f0174
\ No newline at end of file
diff --git a/modules/lang-mustache/licenses/compiler-0.9.6.jar.sha1 b/modules/lang-mustache/licenses/compiler-0.9.6.jar.sha1
deleted file mode 100644
index 9c0e54641475b..0000000000000
--- a/modules/lang-mustache/licenses/compiler-0.9.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1b8707299c34406ed0ba40bbf8513352ac4765c9
\ No newline at end of file
diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java
index df53fcc0c3b6f..7622eb55b7b49 100644
--- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java
+++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java
@@ -196,9 +196,11 @@ public void testIndexedTemplateClient() throws Exception {
Map<String, Object> templateParams = new HashMap<>();
templateParams.put("fieldParam", "foo");
- SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
- new SearchRequest("test").types("type")
- ).setScript("testTemplate").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get();
+ SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test"))
+ .setScript("testTemplate")
+ .setScriptType(ScriptType.STORED)
+ .setScriptParams(templateParams)
+ .get();
assertHitCount(searchResponse.getResponse(), 4);
assertAcked(client().admin().cluster().prepareDeleteStoredScript("testTemplate"));
@@ -238,14 +240,16 @@ public void testIndexedTemplate() throws Exception {
Map<String, Object> templateParams = new HashMap<>();
templateParams.put("fieldParam", "foo");
- SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
- new SearchRequest().indices("test").types("type")
- ).setScript("1a").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get();
+ SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test"))
+ .setScript("1a")
+ .setScriptType(ScriptType.STORED)
+ .setScriptParams(templateParams)
+ .get();
assertHitCount(searchResponse.getResponse(), 4);
expectThrows(
ResourceNotFoundException.class,
- () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test").types("type"))
+ () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test"))
.setScript("1000")
.setScriptType(ScriptType.STORED)
.setScriptParams(templateParams)
@@ -253,7 +257,7 @@ public void testIndexedTemplate() throws Exception {
);
templateParams.put("fieldParam", "bar");
- searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test").types("type"))
+ searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test"))
.setScript("2")
.setScriptType(ScriptType.STORED)
.setScriptParams(templateParams)
@@ -304,7 +308,7 @@ public void testIndexedTemplateOverwrite() throws Exception {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
- () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex").types("test"))
+ () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex"))
.setScript("git01")
.setScriptType(ScriptType.STORED)
.setScriptParams(templateParams)
@@ -320,9 +324,11 @@ public void testIndexedTemplateOverwrite() throws Exception {
.setContent(new BytesArray(query.replace("{{slop}}", Integer.toString(0))), XContentType.JSON)
);
- SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
- new SearchRequest("testindex").types("test")
- ).setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get();
+ SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex"))
+ .setScript("git01")
+ .setScriptType(ScriptType.STORED)
+ .setScriptParams(templateParams)
+ .get();
assertHitCount(searchResponse.getResponse(), 1);
}
}
@@ -360,9 +366,11 @@ public void testIndexedTemplateWithArray() throws Exception {
String[] fieldParams = { "foo", "bar" };
arrayTemplateParams.put("fieldParam", fieldParams);
- SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
- new SearchRequest("test").types("type")
- ).setScript("4").setScriptType(ScriptType.STORED).setScriptParams(arrayTemplateParams).get();
+ SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test"))
+ .setScript("4")
+ .setScriptType(ScriptType.STORED)
+ .setScriptParams(arrayTemplateParams)
+ .get();
assertHitCount(searchResponse.getResponse(), 5);
}
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java
index c4c7ec9bf12b9..fc5a0ff601a00 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java
@@ -33,7 +33,6 @@
package org.opensearch.script.mustache;
import org.opensearch.client.node.NodeClient;
-import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@@ -53,9 +52,6 @@
import static org.opensearch.rest.RestRequest.Method.POST;
public class RestMultiSearchTemplateAction extends BaseRestHandler {
- private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMultiSearchTemplateAction.class);
- static final String TYPES_DEPRECATION_MESSAGE = "[types removal]"
- + " Specifying types in multi search template requests is deprecated.";
private static final Set<String> RESPONSE_PARAMS;
@@ -95,14 +91,6 @@ public String getName() {
@Override
public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
MultiSearchTemplateRequest multiRequest = parseRequest(request, allowExplicitIndex);
-
- // Emit a single deprecation message if any search template contains types.
- for (SearchTemplateRequest searchTemplateRequest : multiRequest.requests()) {
- if (searchTemplateRequest.getRequest().types().length > 0) {
- deprecationLogger.deprecate("msearch_with_types", TYPES_DEPRECATION_MESSAGE);
- break;
- }
- }
return channel -> client.execute(MultiSearchTemplateAction.INSTANCE, multiRequest, new RestToXContentListener<>(channel));
}
diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java
index aaf3126876a59..1a663dcb18235 100644
--- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java
+++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java
@@ -69,13 +69,10 @@ public void testParseRequest() throws Exception {
assertThat(request.requests().get(0).getRequest().preference(), nullValue());
assertThat(request.requests().get(1).getRequest().indices()[0], equalTo("test2"));
assertThat(request.requests().get(1).getRequest().indices()[1], equalTo("test3"));
- assertThat(request.requests().get(1).getRequest().types()[0], equalTo("type1"));
assertThat(request.requests().get(1).getRequest().requestCache(), nullValue());
assertThat(request.requests().get(1).getRequest().preference(), equalTo("_local"));
assertThat(request.requests().get(2).getRequest().indices()[0], equalTo("test4"));
assertThat(request.requests().get(2).getRequest().indices()[1], equalTo("test1"));
- assertThat(request.requests().get(2).getRequest().types()[0], equalTo("type2"));
- assertThat(request.requests().get(2).getRequest().types()[1], equalTo("type1"));
assertThat(request.requests().get(2).getRequest().routing(), equalTo("123"));
assertNotNull(request.requests().get(0).getScript());
assertNotNull(request.requests().get(1).getScript());
diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestMultiSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestMultiSearchTemplateActionTests.java
deleted file mode 100644
index 655d49a0273b5..0000000000000
--- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestMultiSearchTemplateActionTests.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.script.mustache;
-
-import org.opensearch.common.bytes.BytesArray;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.rest.RestRequest;
-import org.opensearch.test.rest.FakeRestRequest;
-import org.opensearch.test.rest.RestActionTestCase;
-import org.junit.Before;
-
-import java.nio.charset.StandardCharsets;
-
-public class RestMultiSearchTemplateActionTests extends RestActionTestCase {
-
- @Before
- public void setUpAction() {
- controller().registerHandler(new RestMultiSearchTemplateAction(Settings.EMPTY));
- }
-
- public void testTypeInPath() {
- String content = "{ \"index\": \"some_index\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n";
- BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8));
-
- RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
- .withPath("/some_index/some_type/_msearch/template")
- .withContent(bytesContent, XContentType.JSON)
- .build();
- // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
-
- dispatchRequest(request);
- assertWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE);
- }
-
- public void testTypeInBody() {
- String content = "{ \"index\": \"some_index\", \"type\": \"some_type\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n";
- BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8));
-
- RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/some_index/_msearch/template")
- .withContent(bytesContent, XContentType.JSON)
- .build();
- // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
-
- dispatchRequest(request);
- assertWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE);
- }
-}
diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestSearchTemplateActionTests.java
deleted file mode 100644
index 4f95da755f8fc..0000000000000
--- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestSearchTemplateActionTests.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.script.mustache;
-
-import org.opensearch.rest.RestRequest;
-import org.opensearch.rest.action.search.RestSearchAction;
-import org.opensearch.test.rest.FakeRestRequest;
-import org.opensearch.test.rest.RestActionTestCase;
-import org.junit.Before;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class RestSearchTemplateActionTests extends RestActionTestCase {
-
- @Before
- public void setUpAction() {
- controller().registerHandler(new RestSearchTemplateAction());
- }
-
- public void testTypeInPath() {
- RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
- .withPath("/some_index/some_type/_search/template")
- .build();
-
- dispatchRequest(request);
- assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
- }
-
- public void testTypeParameter() {
- Map<String, String> params = new HashMap<>();
- params.put("type", "some_type");
-
- RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
- .withPath("/some_index/_search/template")
- .withParams(params)
- .build();
-
- dispatchRequest(request);
- assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
- }
-}
diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java
index 84734e55e241c..0a2bb247e3c1a 100644
--- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java
+++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java
@@ -36,7 +36,6 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.search.ShardSearchFailure;
import org.opensearch.common.bytes.BytesReference;
-import org.opensearch.common.text.Text;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -183,7 +182,7 @@ public void testSourceToXContent() throws IOException {
}
public void testSearchResponseToXContent() throws IOException {
- SearchHit hit = new SearchHit(1, "id", new Text("type"), Collections.emptyMap(), Collections.emptyMap());
+ SearchHit hit = new SearchHit(1, "id", Collections.emptyMap(), Collections.emptyMap());
hit.score(2.0f);
SearchHit[] hits = new SearchHit[] { hit };
@@ -229,7 +228,6 @@ public void testSearchResponseToXContent() throws IOException {
.field("max_score", 1.5F)
.startArray("hits")
.startObject()
- .field("_type", "type")
.field("_id", "id")
.field("_score", 2.0F)
.endObject()
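As in the test above, the SearchHit test fixture is now constructed without a mapping type. A minimal sketch of that constructor usage (id and score values are illustrative):

    // SearchHit no longer takes a Text type argument in this constructor.
    SearchHit hit = new SearchHit(1, "id", Collections.emptyMap(), Collections.emptyMap());
    hit.score(2.0f);
    SearchHit[] hits = new SearchHit[] { hit };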
diff --git a/modules/lang-mustache/src/test/resources/org/opensearch/script/mustache/simple-msearch-template.json b/modules/lang-mustache/src/test/resources/org/opensearch/script/mustache/simple-msearch-template.json
index 11a0091492c4d..1809b4012fde1 100644
--- a/modules/lang-mustache/src/test/resources/org/opensearch/script/mustache/simple-msearch-template.json
+++ b/modules/lang-mustache/src/test/resources/org/opensearch/script/mustache/simple-msearch-template.json
@@ -1,6 +1,6 @@
{"index":["test0", "test1"], "request_cache": true}
{"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } }
-{"index" : "test2,test3", "type" : "type1", "preference": "_local"}
+{"index" : "test2,test3", "preference": "_local"}
{"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } }
-{"index" : ["test4", "test1"], "type" : [ "type2", "type1" ], "routing": "123"}
+{"index" : ["test4", "test1"], "routing": "123"}
{"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } }
diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml
index 22192530b9ec1..a9d3c2da68617 100644
--- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml
+++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml
@@ -141,10 +141,6 @@
---
"Test with new response format":
- - skip:
- version: " - 6.99.99"
- reason: hits.total is returned as an object in 7.0.0
-
- do:
index:
index: test
diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml
index fa56f5c0f72b1..e92e10b9ad276 100644
--- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml
+++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml
@@ -174,10 +174,6 @@ setup:
---
"Test with rest_total_hits_as_int":
- - skip:
- version: " - 6.99.99"
- reason: hits.total is returned as an object in 7.0.0
-
- do:
put_script:
id: stored_template_1
diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml
index d59bfa9ffc322..accb55624dd06 100644
--- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml
+++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml
@@ -25,15 +25,15 @@ setup:
bulk:
refresh: true
body:
- - '{"index": {"_index": "test-0", "_type": "_doc"}}'
+ - '{"index": {"_index": "test-0"}}'
- '{"ip": "10.0.0.1", "integer": 38, "float": 12.5713, "name": "Ruth", "bool": true}'
- - '{"index": {"_index": "test-0", "_type": "_doc"}}'
+ - '{"index": {"_index": "test-0"}}'
- '{"ip": "10.0.0.2", "integer": 42, "float": 15.3393, "name": "Jackie", "surname": "Bowling", "bool": false}'
- - '{"index": {"_index": "test-1", "_type": "_doc"}}'
+ - '{"index": {"_index": "test-1"}}'
- '{"ip": "10.0.0.3", "integer": 29, "float": 19.0517, "name": "Stephanie", "bool": true}'
- - '{"index": {"_index": "test-1", "_type": "_doc"}}'
+ - '{"index": {"_index": "test-1"}}'
- '{"ip": "10.0.0.4", "integer": 19, "float": 19.3717, "surname": "Hamilton", "bool": true}'
- - '{"index": {"_index": "test-2", "_type": "_doc"}}'
+ - '{"index": {"_index": "test-2"}}'
- '{"ip": "10.0.0.5", "integer": 0, "float": 17.3349, "name": "Natalie", "bool": false}'
---
diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle
index 298f28be8cc54..eb93cdc77fb9c 100644
--- a/modules/lang-painless/build.gradle
+++ b/modules/lang-painless/build.gradle
@@ -46,11 +46,11 @@ testClusters.all {
dependencies {
api 'org.antlr:antlr4-runtime:4.5.3'
- api 'org.ow2.asm:asm-util:7.2'
+ api 'org.ow2.asm:asm-util:9.2'
api 'org.ow2.asm:asm-tree:7.2'
api 'org.ow2.asm:asm-commons:7.2'
api 'org.ow2.asm:asm-analysis:7.2'
- api 'org.ow2.asm:asm:7.2'
+ api 'org.ow2.asm:asm:9.2'
api project('spi')
}
diff --git a/modules/lang-painless/licenses/asm-7.2.jar.sha1 b/modules/lang-painless/licenses/asm-7.2.jar.sha1
deleted file mode 100644
index acb97fc1a0249..0000000000000
--- a/modules/lang-painless/licenses/asm-7.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fa637eb67eb7628c915d73762b681ae7ff0b9731
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-9.2.jar.sha1 b/modules/lang-painless/licenses/asm-9.2.jar.sha1
new file mode 100644
index 0000000000000..28f456d3cbcb2
--- /dev/null
+++ b/modules/lang-painless/licenses/asm-9.2.jar.sha1
@@ -0,0 +1 @@
+81a03f76019c67362299c40e0ba13405f5467bff
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-util-7.2.jar.sha1 b/modules/lang-painless/licenses/asm-util-7.2.jar.sha1
deleted file mode 100644
index 6f70a0eea65ab..0000000000000
--- a/modules/lang-painless/licenses/asm-util-7.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a3ae34e57fa8a4040e28247291d0cc3d6b8c7bcf
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/asm-util-9.2.jar.sha1 b/modules/lang-painless/licenses/asm-util-9.2.jar.sha1
new file mode 100644
index 0000000000000..5cb89aa115f30
--- /dev/null
+++ b/modules/lang-painless/licenses/asm-util-9.2.jar.sha1
@@ -0,0 +1 @@
+fbc178fc5ba3dab50fd7e8a5317b8b647c8e8946
\ No newline at end of file
diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java
index d0041b22929e1..fb8d2eccfa043 100644
--- a/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java
+++ b/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java
@@ -206,13 +206,10 @@ public void testDynamicListWrongIndex() {
* the parser with right-curly brackets to allow statements to be delimited by them at the end of blocks.
*/
public void testRCurlyNotDelim() {
- IllegalArgumentException e = expectScriptThrows(
- IllegalArgumentException.class,
- () -> {
- // We don't want PICKY here so we get the normal error message
- exec("def i = 1} return 1", emptyMap(), emptyMap(), false);
- }
- );
+ IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () -> {
+ // We don't want PICKY here so we get the normal error message
+ exec("def i = 1} return 1", emptyMap(), emptyMap(), false);
+ });
assertEquals("unexpected token ['}'] was expecting one of [{, ';'}].", e.getMessage());
}
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml
index fd5c89b490d39..cb118ed9d562f 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml
@@ -21,7 +21,6 @@
- match: { _index: test_1 }
- match: { _id: "1" }
- - match: { _type: _doc }
- match: { _version: 2 }
- do:
@@ -43,7 +42,6 @@
- match: { _index: test_1 }
- match: { _id: "1" }
- - match: { _type: _doc }
- match: { _version: 3 }
- do:
@@ -65,7 +63,6 @@
- match: { _index: test_1 }
- match: { _id: "1" }
- - match: { _type: _doc }
- match: { _version: 4 }
- do:
@@ -89,7 +86,6 @@
- match: { _index: test_1 }
- match: { _id: "1" }
- - match: { _type: _doc }
- match: { _version: 5 }
- do:
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml
index 0ce1e369cb7c5..a006fde630716 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml
@@ -452,10 +452,6 @@
---
"Exception on negative score":
- - skip:
- version: " - 6.99.99"
- reason: "check on negative scores was added from 7.0.0 on"
-
- do:
index:
index: test
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml
index 50fc0eea501df..57e7b4e31e057 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml
@@ -1,10 +1,6 @@
# Sanity integration test to make sure the custom context and whitelist work for moving_fn pipeline agg
#
setup:
- - skip:
- version: " - 6.3.99"
- reason: "moving_fn added in 6.4.0"
-
- do:
indices.create:
index: test
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml
index 495ca2131d886..d506db0cb0d3e 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml
@@ -1,10 +1,4 @@
# Integration tests for ScriptScoreQuery using Painless
-
-setup:
- - skip:
- version: " - 6.99.99"
- reason: "script score query was introduced in 7.0.0"
-
---
"Math functions":
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/90_interval_query_filter.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/90_interval_query_filter.yml
index 0a6cf993e2a2e..7db3eb8b9b2aa 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/90_interval_query_filter.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/90_interval_query_filter.yml
@@ -1,8 +1,4 @@
setup:
- - skip:
- version: " - 6.99.99"
- reason: "Implemented in 7.0"
-
- do:
indices.create:
index: test
diff --git a/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java b/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java
index d400fb69f34ca..7ddb27e28d94c 100644
--- a/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java
+++ b/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java
@@ -203,7 +203,7 @@ private SearchRequestBuilder searchByNumericRange(int low, int high) {
}
private SearchRequestBuilder prepareSearch() {
- SearchRequestBuilder request = client().prepareSearch("test").setTypes("test");
+ SearchRequestBuilder request = client().prepareSearch("test");
request.addStoredField("foo.token_count");
request.addStoredField("foo.token_count_without_position_increments");
if (loadCountedFields) {
diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureFieldMapper.java
index 6f64be86f934c..31fef7301477a 100644
--- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureFieldMapper.java
@@ -135,11 +135,11 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
- return new SourceValueFetcher(name(), mapperService) {
+ return new SourceValueFetcher(name(), context) {
@Override
protected Float parseSourceValue(Object value) {
return objectToFloat(value);
diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapper.java
index dd29826af3af8..b9effebeb9e61 100644
--- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapper.java
@@ -65,7 +65,7 @@ public String typeName() {
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "].");
}
diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java
index b35f790901f33..43853eb40f432 100644
--- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java
@@ -105,8 +105,8 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
- return SourceValueFetcher.identity(name(), mapperService, format);
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
+ return SourceValueFetcher.identity(name(), context, format);
}
@Override
diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java
index 93f3adf476836..78a9e389eb63f 100644
--- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java
@@ -260,11 +260,11 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
- return new SourceValueFetcher(name(), mapperService) {
+ return new SourceValueFetcher(name(), context) {
@Override
protected Double parseSourceValue(Object value) {
double doubleValue;
diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java
index 6c55c2ecd0f04..7bf102584a379 100644
--- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java
@@ -329,8 +329,8 @@ private ShingleFieldType shingleFieldForPositions(int positions) {
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
- return SourceValueFetcher.toString(name(), mapperService, format);
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
+ return SourceValueFetcher.toString(name(), context, format);
}
@Override
@@ -439,10 +439,10 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
// Because this internal field is modelled as a multi-field, SourceValueFetcher will look up its
// parent field in _source. So we don't need to use the parent field name here.
- return SourceValueFetcher.toString(name(), mapperService, format);
+ return SourceValueFetcher.toString(name(), context, format);
}
@Override
@@ -545,10 +545,10 @@ void setPrefixFieldType(PrefixFieldType prefixFieldType) {
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
// Because this internal field is modelled as a multi-field, SourceValueFetcher will look up its
// parent field in _source. So we don't need to use the parent field name here.
- return SourceValueFetcher.toString(name(), mapperService, format);
+ return SourceValueFetcher.toString(name(), context, format);
}
@Override
diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/TokenCountFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/TokenCountFieldMapper.java
index ed71ffa5158d7..fd029503e9a7b 100644
--- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/TokenCountFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/TokenCountFieldMapper.java
@@ -36,6 +36,7 @@
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.opensearch.index.analysis.NamedAnalyzer;
+import org.opensearch.index.query.QueryShardContext;
import org.opensearch.search.lookup.SearchLookup;
import java.io.IOException;
@@ -119,7 +120,7 @@ static class TokenCountFieldType extends NumberFieldMapper.NumberFieldType {
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
if (hasDocValues() == false) {
return lookup -> org.opensearch.common.collect.List.of();
}
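The mapper-extras hunks above all make the same signature change: valueFetcher now receives a QueryShardContext instead of a MapperService, and SourceValueFetcher is built from that context. A minimal sketch of what an override looks like after the change, for a hypothetical source-backed field type (the pass-through parseSourceValue is illustrative only):

    @Override
    public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
        if (format != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
        }
        // SourceValueFetcher is now constructed from the shard context rather than the MapperService.
        return new SourceValueFetcher(name(), context) {
            @Override
            protected Object parseSourceValue(Object value) {
                return value; // illustrative: return the _source value unchanged
            }
        };
    }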
diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml
index 8a874d30591f6..6fea35eb21f4e 100644
--- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml
+++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml
@@ -1,8 +1,4 @@
setup:
- - skip:
- version: " - 6.99.99"
- reason: "The rank feature field/query was introduced in 7.0.0"
-
- do:
indices.create:
index: test
diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml
index f524bd93bb600..d4d5d2a360406 100644
--- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml
+++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml
@@ -1,8 +1,4 @@
setup:
- - skip:
- version: " - 6.99.99"
- reason: "The rank_features field was introduced in 7.0.0"
-
- do:
indices.create:
index: test
diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml
index ffe05097748a6..21843dad1d177 100644
--- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml
+++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml
@@ -19,7 +19,6 @@ setup:
- do:
index:
index: test
- type: _doc
id: 1
body:
a_field: "quick brown fox jump lazy dog"
@@ -28,7 +27,6 @@ setup:
- do:
index:
index: test
- type: _doc
id: 2
body:
a_field: "xylophone xylophone xylophone"
@@ -41,7 +39,6 @@ setup:
- do:
get:
index: test
- type: _doc
id: 1
- is_true: found
diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml
index 15778393959e5..58441abac8f88 100644
--- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml
+++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml
@@ -22,7 +22,6 @@ setup:
- do:
index:
index: test
- type: _doc
id: 1
body:
a_field: "quick brown fox jump lazy dog"
diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java
index 74c884c9d0e25..b831e76c4ad71 100644
--- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java
+++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java
@@ -132,7 +132,7 @@ public void testParentWithMultipleBuckets() throws Exception {
TopHits topHits = childrenBucket.getAggregations().get("top_comments");
logger.info("total_hits={}", topHits.getHits().getTotalHits().value);
for (SearchHit searchHit : topHits.getHits()) {
- logger.info("hit= {} {} {}", searchHit.getSortValues()[0], searchHit.getType(), searchHit.getId());
+ logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId());
}
}
diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java
index 2972b170e07b7..6910964ac8c58 100644
--- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java
+++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java
@@ -201,7 +201,7 @@ public void testSimpleChildQuery() throws Exception {
// TEST FETCHING _parent from child
SearchResponse searchResponse;
- searchResponse = client().prepareSearch("test").setQuery(idsQuery("doc").addIds("c1")).get();
+ searchResponse = client().prepareSearch("test").setQuery(idsQuery().addIds("c1")).get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1"));
@@ -608,7 +608,7 @@ public void testExplainUsage() throws Exception {
assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1"));
- ExplainResponse explainResponse = client().prepareExplain("test", "doc", parentId)
+ ExplainResponse explainResponse = client().prepareExplain("test", parentId)
.setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max))
.get();
assertThat(explainResponse.isExists(), equalTo(true));
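The ChildQuerySearchIT changes drop the type argument from both the ids query and the explain request. A minimal sketch of the typeless forms used above (index, id, and field names are taken from the test and are illustrative here):

    SearchResponse byId = client().prepareSearch("test")
        .setQuery(idsQuery().addIds("c1"))   // idsQuery() no longer takes a type
        .get();
    ExplainResponse explanation = client().prepareExplain("test", "p1")   // no type between index and id
        .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max))
        .get();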
diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java
index 913cba6950228..03cff9c19a703 100644
--- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java
+++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java
@@ -151,9 +151,7 @@ public void testSimpleParentChild() throws Exception {
assertThat(innerHits.getTotalHits().value, equalTo(2L));
assertThat(innerHits.getAt(0).getId(), equalTo("c1"));
- assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
assertThat(innerHits.getAt(1).getId(), equalTo("c2"));
- assertThat(innerHits.getAt(1).getType(), equalTo("doc"));
final boolean seqNoAndTerm = randomBoolean();
response = client().prepareSearch("articles")
@@ -172,11 +170,8 @@ public void testSimpleParentChild() throws Exception {
assertThat(innerHits.getTotalHits().value, equalTo(3L));
assertThat(innerHits.getAt(0).getId(), equalTo("c4"));
- assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
assertThat(innerHits.getAt(1).getId(), equalTo("c5"));
- assertThat(innerHits.getAt(1).getType(), equalTo("doc"));
assertThat(innerHits.getAt(2).getId(), equalTo("c6"));
- assertThat(innerHits.getAt(2).getType(), equalTo("doc"));
if (seqNoAndTerm) {
assertThat(innerHits.getAt(0).getPrimaryTerm(), equalTo(1L));
@@ -301,7 +296,6 @@ public void testRandomParentChild() throws Exception {
int offset2 = 0;
for (int parent = 0; parent < numDocs; parent++) {
SearchHit searchHit = searchResponse.getHits().getAt(parent);
- assertThat(searchHit.getType(), equalTo("doc"));
assertThat(searchHit.getId(), equalTo(String.format(Locale.ENGLISH, "p_%03d", parent)));
assertThat(searchHit.getShard(), notNullValue());
@@ -309,7 +303,6 @@ public void testRandomParentChild() throws Exception {
assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent]));
for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) {
SearchHit innerHit = inner.getAt(child);
- assertThat(innerHit.getType(), equalTo("doc"));
String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child);
assertThat(innerHit.getId(), equalTo(childId));
assertThat(innerHit.getNestedIdentity(), nullValue());
@@ -320,7 +313,6 @@ public void testRandomParentChild() throws Exception {
assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent]));
for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) {
SearchHit innerHit = inner.getAt(child);
- assertThat(innerHit.getType(), equalTo("doc"));
String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child);
assertThat(innerHit.getId(), equalTo(childId));
assertThat(innerHit.getNestedIdentity(), nullValue());
@@ -376,16 +368,12 @@ public void testInnerHitsOnHasParent() throws Exception {
SearchHit searchHit = response.getHits().getAt(0);
assertThat(searchHit.getId(), equalTo("3"));
- assertThat(searchHit.getType(), equalTo("doc"));
assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L));
- assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("doc"));
assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1"));
searchHit = response.getHits().getAt(1);
assertThat(searchHit.getId(), equalTo("4"));
- assertThat(searchHit.getType(), equalTo("doc"));
assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L));
- assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("doc"));
assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2"));
}
@@ -430,12 +418,10 @@ public void testParentChildMultipleLayers() throws Exception {
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.getTotalHits().value, equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("3"));
- assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
innerHits = innerHits.getAt(0).getInnerHits().get("remark");
assertThat(innerHits.getTotalHits().value, equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("5"));
- assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
response = client().prepareSearch("articles")
.setQuery(
@@ -455,12 +441,10 @@ public void testParentChildMultipleLayers() throws Exception {
innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.getTotalHits().value, equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("4"));
- assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
innerHits = innerHits.getAt(0).getInnerHits().get("remark");
assertThat(innerHits.getTotalHits().value, equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("6"));
- assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
}
public void testRoyals() throws Exception {
@@ -613,7 +597,7 @@ public void testUseMaxDocInsteadOfSize() throws Exception {
assertHitCount(response, 1);
}
- public void testNestedInnerHitWrappedInParentChildInnerhit() throws Exception {
+ public void testNestedInnerHitWrappedInParentChildInnerhit() {
assertAcked(
prepareCreate("test").addMapping(
"doc",
@@ -646,7 +630,7 @@ public void testNestedInnerHitWrappedInParentChildInnerhit() throws Exception {
assertThat(hit.getInnerHits().get("child_type").getAt(0).getInnerHits().get("nested_type").getAt(0).field("_parent"), nullValue());
}
- public void testInnerHitsWithIgnoreUnmapped() throws Exception {
+ public void testInnerHitsWithIgnoreUnmapped() {
assertAcked(
prepareCreate("index1").addMapping(
"doc",
@@ -676,7 +660,7 @@ public void testInnerHitsWithIgnoreUnmapped() throws Exception {
assertSearchHits(response, "1", "3");
}
- public void testTooHighResultWindow() throws Exception {
+ public void testTooHighResultWindow() {
assertAcked(
prepareCreate("index1").addMapping(
"doc",
diff --git a/modules/parent-join/src/main/java/org/opensearch/join/mapper/MetaJoinFieldMapper.java b/modules/parent-join/src/main/java/org/opensearch/join/mapper/MetaJoinFieldMapper.java
index 4570a67d67e63..2b0d3e4764b2a 100644
--- a/modules/parent-join/src/main/java/org/opensearch/join/mapper/MetaJoinFieldMapper.java
+++ b/modules/parent-join/src/main/java/org/opensearch/join/mapper/MetaJoinFieldMapper.java
@@ -39,7 +39,6 @@
import org.opensearch.index.fielddata.IndexFieldData;
import org.opensearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData;
import org.opensearch.index.mapper.FieldMapper;
-import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.ParseContext;
import org.opensearch.index.mapper.StringFieldType;
import org.opensearch.index.mapper.TextSearchInfo;
@@ -111,7 +110,7 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for metadata field [" + typeName() + "].");
}
diff --git a/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentIdFieldMapper.java b/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentIdFieldMapper.java
index 4e3d26d73c30f..29a02a5bc8032 100644
--- a/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentIdFieldMapper.java
+++ b/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentIdFieldMapper.java
@@ -48,11 +48,11 @@
import org.opensearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData;
import org.opensearch.index.mapper.FieldMapper;
import org.opensearch.index.mapper.MappedFieldType;
-import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.ParseContext;
import org.opensearch.index.mapper.StringFieldType;
import org.opensearch.index.mapper.TextSearchInfo;
import org.opensearch.index.mapper.ValueFetcher;
+import org.opensearch.index.query.QueryShardContext;
import org.opensearch.search.aggregations.support.CoreValuesSourceType;
import org.opensearch.search.lookup.SearchLookup;
@@ -132,7 +132,7 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "].");
}
diff --git a/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentJoinFieldMapper.java b/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentJoinFieldMapper.java
index 7d34e6d40a752..662bacc0fb08a 100644
--- a/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentJoinFieldMapper.java
+++ b/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentJoinFieldMapper.java
@@ -57,6 +57,7 @@
import org.opensearch.index.mapper.StringFieldType;
import org.opensearch.index.mapper.TextSearchInfo;
import org.opensearch.index.mapper.ValueFetcher;
+import org.opensearch.index.query.QueryShardContext;
import org.opensearch.search.aggregations.support.CoreValuesSourceType;
import org.opensearch.search.lookup.SearchLookup;
@@ -243,8 +244,8 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
- return SourceValueFetcher.identity(name(), mapperService, format);
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
+ return SourceValueFetcher.identity(name(), context, format);
}
@Override
diff --git a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java
index 62040b3893e83..628345a625d1b 100644
--- a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java
+++ b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java
@@ -474,7 +474,7 @@ public void testUpdateRelations() throws Exception {
.endObject()
);
docMapper = indexService.mapperService()
- .merge("_doc", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE);
+ .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE);
ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService());
assertNotNull(mapper);
assertEquals("join_field", mapper.name());
@@ -501,7 +501,7 @@ public void testUpdateRelations() throws Exception {
.endObject()
);
docMapper = indexService.mapperService()
- .merge("_doc", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE);
+ .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE);
ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService());
assertNotNull(mapper);
assertEquals("join_field", mapper.name());
diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java
index e069fc23a141d..5595c98a439bf 100644
--- a/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java
@@ -290,13 +290,9 @@ public void testFromJson() throws IOException {
}
public void testToQueryInnerQueryType() throws IOException {
- String[] searchTypes = new String[] { TYPE };
QueryShardContext shardContext = createShardContext();
- shardContext.setTypes(searchTypes);
HasChildQueryBuilder hasChildQueryBuilder = hasChildQuery(CHILD_DOC, new IdsQueryBuilder().addIds("id"), ScoreMode.None);
Query query = hasChildQueryBuilder.toQuery(shardContext);
- // verify that the context types are still the same as the ones we previously set
- assertThat(shardContext.getTypes(), equalTo(searchTypes));
assertLateParsingQuery(query, CHILD_DOC, "id");
}
diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java
index 9783cb703ade1..0f983799a6d25 100644
--- a/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java
@@ -192,13 +192,9 @@ public void testIllegalValues() throws IOException {
}
public void testToQueryInnerQueryType() throws IOException {
- String[] searchTypes = new String[] { TYPE };
QueryShardContext shardContext = createShardContext();
- shardContext.setTypes(searchTypes);
HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(PARENT_DOC, new IdsQueryBuilder().addIds("id"), false);
Query query = hasParentQueryBuilder.toQuery(shardContext);
- // verify that the context types are still the same as the ones we previously set
- assertThat(shardContext.getTypes(), equalTo(searchTypes));
HasChildQueryBuilderTests.assertLateParsingQuery(query, PARENT_DOC, "id");
}
diff --git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml
index bff5639e4d270..bb2d39fbbdd4e 100644
--- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml
+++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml
@@ -49,35 +49,29 @@ teardown:
- match: { hits.total: 6 }
- match: { hits.hits.0._index: "test" }
- - match: { hits.hits.0._type: "_doc" }
- match: { hits.hits.0._id: "3" }
- match: { hits.hits.0._source.join_field.name: "child" }
- match: { hits.hits.0._source.join_field.parent: "1" }
- is_false: hits.hits.0.fields.join_field#child }
- match: { hits.hits.1._index: "test" }
- - match: { hits.hits.1._type: "_doc" }
- match: { hits.hits.1._id: "4" }
- match: { hits.hits.1._source.join_field.name: "child" }
- match: { hits.hits.1._source.join_field.parent: "1" }
- is_false: hits.hits.1.fields.join_field#child }
- match: { hits.hits.2._index: "test" }
- - match: { hits.hits.2._type: "_doc" }
- match: { hits.hits.2._id: "5" }
- match: { hits.hits.2._source.join_field.name: "child" }
- match: { hits.hits.2._source.join_field.parent: "2" }
- is_false: hits.hits.2.fields.join_field#child }
- match: { hits.hits.3._index: "test" }
- - match: { hits.hits.3._type: "_doc" }
- match: { hits.hits.3._id: "6" }
- match: { hits.hits.3._source.join_field.name: "grand_child" }
- match: { hits.hits.3._source.join_field.parent: "5" }
- match: { hits.hits.4._index: "test" }
- - match: { hits.hits.4._type: "_doc" }
- match: { hits.hits.4._id: "1" }
- match: { hits.hits.4._source.join_field.name: "parent" }
- is_false: hits.hits.4._source.join_field.parent
- match: { hits.hits.5._index: "test" }
- - match: { hits.hits.5._type: "_doc" }
- match: { hits.hits.5._id: "2" }
- match: { hits.hits.5._source.join_field.name: "parent" }
- is_false: hits.hits.5._source.join_field.parent
@@ -96,12 +90,10 @@ teardown:
- match: { hits.total: 2 }
- match: { hits.hits.0._index: "test" }
- - match: { hits.hits.0._type: "_doc" }
- match: { hits.hits.0._id: "3" }
- match: { hits.hits.0._source.join_field.name: "child" }
- match: { hits.hits.0._source.join_field.parent: "1" }
- match: { hits.hits.1._index: "test" }
- - match: { hits.hits.1._type: "_doc" }
- match: { hits.hits.1._id: "4" }
- match: { hits.hits.1._source.join_field.name: "child" }
- match: { hits.hits.1._source.join_field.parent: "1" }
diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java
index c111590d7a2ca..f76f14a6d9575 100644
--- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java
+++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java
@@ -889,7 +889,6 @@ public void testWithMultiplePercolatorFields() throws Exception {
.get();
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getType(), equalTo("type"));
assertThat(response.getHits().getAt(0).getIndex(), equalTo("test1"));
response = client().prepareSearch()
@@ -898,7 +897,6 @@ public void testWithMultiplePercolatorFields() throws Exception {
.get();
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getType(), equalTo("type"));
assertThat(response.getHits().getAt(0).getIndex(), equalTo("test2"));
// Unacceptable:
diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java
index 5e245f7082ada..672d4dd15a254 100644
--- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java
+++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java
@@ -501,13 +501,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) {
return rewritten;
}
}
- GetRequest getRequest;
- if (indexedDocumentType != null) {
- deprecationLogger.deprecate("percolate_with_type", TYPE_DEPRECATION_MESSAGE);
- getRequest = new GetRequest(indexedDocumentIndex, indexedDocumentType, indexedDocumentId);
- } else {
- getRequest = new GetRequest(indexedDocumentIndex, indexedDocumentId);
- }
+ GetRequest getRequest = new GetRequest(indexedDocumentIndex, indexedDocumentId);
getRequest.preference("_local");
getRequest.routing(indexedDocumentRouting);
getRequest.preference(indexedDocumentPreference);
diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java
index 72adc5539d6d1..a8b0395dd84e0 100644
--- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java
+++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java
@@ -72,7 +72,6 @@
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.Mapper;
import org.opensearch.index.mapper.MapperParsingException;
-import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.NumberFieldMapper;
import org.opensearch.index.mapper.ParametrizedFieldMapper;
import org.opensearch.index.mapper.ParseContext;
@@ -196,7 +195,12 @@ static BinaryFieldMapper createQueryBuilderFieldBuilder(BuilderContext context)
}
static RangeFieldMapper createExtractedRangeFieldBuilder(String name, RangeType rangeType, BuilderContext context) {
- RangeFieldMapper.Builder builder = new RangeFieldMapper.Builder(name, rangeType, true);
+ RangeFieldMapper.Builder builder = new RangeFieldMapper.Builder(
+ name,
+ rangeType,
+ true,
+ hasIndexCreated(context.indexSettings()) ? context.indexCreatedVersion() : null
+ );
// For now no doc values, because in processQuery(...) only the Lucene range fields get added:
builder.docValues(false);
return builder.build(context);
@@ -245,8 +249,8 @@ public Query termQuery(Object value, QueryShardContext context) {
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
- return SourceValueFetcher.identity(name(), mapperService, format);
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
+ return SourceValueFetcher.identity(name(), context, format);
}
Query percolateQuery(
diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java
index 670aa74501f60..9b4e42d239750 100644
--- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java
+++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java
@@ -38,7 +38,6 @@
import org.apache.lucene.search.QueryVisitor;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.document.DocumentField;
-import org.opensearch.common.text.Text;
import org.opensearch.search.SearchHit;
import org.opensearch.search.fetch.FetchContext;
import org.opensearch.search.fetch.FetchSubPhase;
@@ -108,13 +107,7 @@ public void process(HitContext hit) throws IOException {
int slot = (int) matchedSlot;
BytesReference document = percolateQuery.getDocuments().get(slot);
HitContext subContext = new HitContext(
- new SearchHit(
- slot,
- "unknown",
- new Text(hit.hit().getType()),
- Collections.emptyMap(),
- Collections.emptyMap()
- ),
+ new SearchHit(slot, "unknown", Collections.emptyMap(), Collections.emptyMap()),
percolatorLeafReaderContext,
slot,
new SourceLookup()
diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java
index 3b0830b7e4519..5f11feee8f441 100644
--- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java
+++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java
@@ -184,7 +184,6 @@ protected String[] shuffleProtectedFields() {
@Override
protected GetResponse executeGet(GetRequest getRequest) {
assertThat(getRequest.index(), Matchers.equalTo(indexedDocumentIndex));
- assertThat(getRequest.type(), Matchers.equalTo(MapperService.SINGLE_MAPPING_NAME));
assertThat(getRequest.id(), Matchers.equalTo(indexedDocumentId));
assertThat(getRequest.routing(), Matchers.equalTo(indexedDocumentRouting));
assertThat(getRequest.preference(), Matchers.equalTo(indexedDocumentPreference));
@@ -193,7 +192,6 @@ protected GetResponse executeGet(GetRequest getRequest) {
return new GetResponse(
new GetResult(
indexedDocumentIndex,
- MapperService.SINGLE_MAPPING_NAME,
indexedDocumentId,
0,
1,
@@ -208,7 +206,6 @@ protected GetResponse executeGet(GetRequest getRequest) {
return new GetResponse(
new GetResult(
indexedDocumentIndex,
- MapperService.SINGLE_MAPPING_NAME,
indexedDocumentId,
UNASSIGNED_SEQ_NO,
0,
@@ -341,7 +338,6 @@ public void testFromJsonWithType() throws IOException {
+ "\"}}"
);
rewriteAndFetch(queryBuilder, queryShardContext).toQuery(queryShardContext);
- assertWarnings(PercolateQueryBuilder.TYPE_DEPRECATION_MESSAGE);
}
public void testBothDocumentAndDocumentsSpecified() {
diff --git a/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml b/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml
index 08d344687adc7..35ebb2b099139 100644
--- a/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml
+++ b/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml
@@ -1,10 +1,5 @@
---
"Test percolator basics via rest":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
indices.create:
index: queries_index
@@ -74,7 +69,7 @@
percolate:
field: query
index: documents_index
- id: some_id
+ id: some_id
- match: { hits.total: 1 }
- do:
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java
index 7c385cd45a840..2b1c56d9bba3b 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java
@@ -36,7 +36,6 @@
import org.opensearch.common.Strings;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -44,7 +43,6 @@
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.common.xcontent.json.JsonXContent;
-import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.shard.ShardId;
import org.opensearch.search.SearchHit;
import org.opensearch.search.SearchShardTarget;
@@ -87,13 +85,7 @@ public void testDCGAt() {
SearchHit[] hits = new SearchHit[6];
for (int i = 0; i < 6; i++) {
rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
- hits[i] = new SearchHit(
- i,
- Integer.toString(i),
- new Text(MapperService.SINGLE_MAPPING_NAME),
- Collections.emptyMap(),
- Collections.emptyMap()
- );
+ hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
}
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -143,13 +135,7 @@ public void testDCGAtSixMissingRatings() {
rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
}
}
- hits[i] = new SearchHit(
- i,
- Integer.toString(i),
- new Text(MapperService.SINGLE_MAPPING_NAME),
- Collections.emptyMap(),
- Collections.emptyMap()
- );
+ hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
}
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -206,13 +192,7 @@ public void testDCGAtFourMoreRatings() {
// only create four hits
SearchHit[] hits = new SearchHit[4];
for (int i = 0; i < 4; i++) {
- hits[i] = new SearchHit(
- i,
- Integer.toString(i),
- new Text(MapperService.SINGLE_MAPPING_NAME),
- Collections.emptyMap(),
- Collections.emptyMap()
- );
+ hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
}
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
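
For reference, a minimal sketch of the typeless SearchHit construction these rank-eval tests now use; the hit id and shard values are hypothetical placeholders mirroring the test fixtures above.

import java.util.Collections;

import org.opensearch.action.OriginalIndices;
import org.opensearch.index.shard.ShardId;
import org.opensearch.search.SearchHit;
import org.opensearch.search.SearchShardTarget;

public class TypelessSearchHitSketch {
    public static SearchHit hit() {
        // (docId, id, documentFields, metaFields) -- the Text mapping-type argument has been dropped.
        SearchHit hit = new SearchHit(0, "0", Collections.emptyMap(), Collections.emptyMap());
        hit.shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
        return hit;
    }
}
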
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java
index 4fb0089a32cb1..723a1e2202e2b 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java
@@ -35,7 +35,6 @@
import org.opensearch.action.OriginalIndices;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -43,7 +42,6 @@
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.common.xcontent.json.JsonXContent;
-import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.shard.ShardId;
import org.opensearch.search.SearchHit;
import org.opensearch.search.SearchShardTarget;
@@ -130,13 +128,7 @@ private SearchHit[] createSearchHits(List<RatedDocument> rated, Integer[] releva
if (relevanceRatings[i] != null) {
rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
}
- hits[i] = new SearchHit(
- i,
- Integer.toString(i),
- new Text(MapperService.SINGLE_MAPPING_NAME),
- Collections.emptyMap(),
- Collections.emptyMap()
- );
+ hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
}
return hits;
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java
index befb9bdf371ab..2cd16c05f2a20 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java
@@ -35,7 +35,6 @@
import org.opensearch.action.OriginalIndices;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -216,7 +215,7 @@ public void testXContentParsingIsNotLenient() throws IOException {
private static SearchHit[] createSearchHits(int from, int to, String index) {
SearchHit[] hits = new SearchHit[to + 1 - from];
for (int i = from; i <= to; i++) {
- hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
+ hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
}
return hits;
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java
index ea9eadb0c9cbd..1c7a02dc27cf7 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java
@@ -35,7 +35,6 @@
import org.opensearch.action.OriginalIndices;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -43,7 +42,6 @@
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.common.xcontent.json.JsonXContent;
-import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.shard.ShardId;
import org.opensearch.search.SearchHit;
import org.opensearch.search.SearchShardTarget;
@@ -127,7 +125,7 @@ public void testIgnoreUnlabeled() {
rated.add(createRatedDoc("test", "1", RELEVANT_RATING));
// add an unlabeled search hit
SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3);
- searchHits[2] = new SearchHit(2, "2", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap());
+ searchHits[2] = new SearchHit(2, "2", Collections.emptyMap(), Collections.emptyMap());
searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated);
@@ -146,7 +144,7 @@ public void testIgnoreUnlabeled() {
public void testNoRatedDocs() throws Exception {
SearchHit[] hits = new SearchHit[5];
for (int i = 0; i < 5; i++) {
- hits[i] = new SearchHit(i, i + "", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap());
+ hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
}
EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList());
@@ -283,7 +281,7 @@ private static PrecisionAtK mutate(PrecisionAtK original) {
private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index) {
SearchHit[] hits = new SearchHit[rated.size()];
for (int i = 0; i < rated.size(); i++) {
- hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
+ hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
}
return hits;
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java
index 7e81dde0cab29..3d883b373d705 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java
@@ -44,14 +44,12 @@
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.io.stream.StreamInput;
-import org.opensearch.common.text.Text;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentLocation;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.shard.ShardId;
import org.opensearch.search.SearchHit;
import org.opensearch.search.SearchParseException;
@@ -188,9 +186,9 @@ public void testToXContent() throws IOException {
+ " \"coffee_query\": {"
+ " \"metric_score\": 0.1,"
+ " \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}],"
- + " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"123\",\"_score\":1.0},"
+ + " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_id\":\"123\",\"_score\":1.0},"
+ " \"rating\":5},"
- + " {\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"456\",\"_score\":1.0},"
+ + " {\"hit\":{\"_index\":\"index\",\"_id\":\"456\",\"_score\":1.0},"
+ " \"rating\":null}"
+ " ]"
+ " }"
@@ -210,13 +208,7 @@ public void testToXContent() throws IOException {
}
private static RatedSearchHit searchHit(String index, int docId, Integer rating) {
- SearchHit hit = new SearchHit(
- docId,
- docId + "",
- new Text(MapperService.SINGLE_MAPPING_NAME),
- Collections.emptyMap(),
- Collections.emptyMap()
- );
+ SearchHit hit = new SearchHit(docId, docId + "", Collections.emptyMap(), Collections.emptyMap());
hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
hit.score(1.0f);
return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty());
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java
index bfc9098f59e43..555a0c95a3456 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java
@@ -34,11 +34,9 @@
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.index.mapper.MapperService;
import org.opensearch.search.SearchHit;
import org.opensearch.test.OpenSearchTestCase;
@@ -55,7 +53,6 @@ public static RatedSearchHit randomRatedSearchHit() {
SearchHit searchHit = new SearchHit(
randomIntBetween(0, 10),
randomAlphaOfLength(10),
- new Text(MapperService.SINGLE_MAPPING_NAME),
Collections.emptyMap(),
Collections.emptyMap()
);
@@ -71,13 +68,7 @@ private static RatedSearchHit mutateTestItem(RatedSearchHit original) {
rating = rating.isPresent() ? OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5));
break;
case 1:
- hit = new SearchHit(
- hit.docId(),
- hit.getId() + randomAlphaOfLength(10),
- new Text(MapperService.SINGLE_MAPPING_NAME),
- Collections.emptyMap(),
- Collections.emptyMap()
- );
+ hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), Collections.emptyMap(), Collections.emptyMap());
break;
default:
throw new IllegalStateException("The test should only allow two parameters mutated");
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java
index 6ffaaa8e89e7b..6efb44a3875e1 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java
@@ -35,7 +35,6 @@
import org.opensearch.action.OriginalIndices;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -128,7 +127,7 @@ public void testNoRatedDocs() throws Exception {
int k = 5;
SearchHit[] hits = new SearchHit[k];
for (int i = 0; i < k; i++) {
- hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
+ hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
}
@@ -252,7 +251,7 @@ private static RecallAtK mutate(RecallAtK original) {
private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index) {
SearchHit[] hits = new SearchHit[rated.size()];
for (int i = 0; i < rated.size(); i++) {
- hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
+ hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
}
return hits;
diff --git a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/10_basic.yml b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/10_basic.yml
index 382b0789ba0ec..2ad583e03caaa 100644
--- a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/10_basic.yml
+++ b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/10_basic.yml
@@ -40,11 +40,6 @@ setup:
---
"Response format":
-
- - skip:
- version: " - 6.2.99"
- reason: response format was updated in 6.3
-
- do:
rank_eval:
index: foo,
@@ -121,11 +116,6 @@ setup:
---
"Mean Reciprocal Rank":
-
- - skip:
- version: " - 6.2.99"
- reason: response format was updated in 6.3
-
- do:
rank_eval:
body: {
@@ -160,11 +150,6 @@ setup:
---
"Expected Reciprocal Rank":
-
- - skip:
- version: " - 6.3.99"
- reason: ERR was introduced in 6.4
-
- do:
rank_eval:
body: {
diff --git a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/20_dcg.yml b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/20_dcg.yml
index 90094baabb9db..82005efcebe18 100644
--- a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/20_dcg.yml
+++ b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/20_dcg.yml
@@ -1,10 +1,5 @@
---
"Response format":
-
- - skip:
- version: " - 6.1.99"
- reason: the ranking evaluation feature is available since 6.2
-
- do:
index:
index: foo
diff --git a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/30_failures.yml b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/30_failures.yml
index b9f55ed12ad7e..c88a769b8687b 100644
--- a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/30_failures.yml
+++ b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/30_failures.yml
@@ -1,10 +1,5 @@
---
"Response format":
-
- - skip:
- version: " - 6.2.99"
- reason: response format was updated in 6.3
-
- do:
index:
index: foo
diff --git a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml
index 57d5aa5642ef6..08897e17ef900 100644
--- a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml
+++ b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml
@@ -48,11 +48,6 @@ setup:
---
"Basic rank-eval request with template":
-
- - skip:
- version: " - 6.1.99"
- reason: the ranking evaluation feature is available since 6.2
-
- do:
rank_eval:
body: {
diff --git a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java
index b19de5150dfe8..827afdeb7ad86 100644
--- a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java
+++ b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java
@@ -311,7 +311,7 @@ private ReindexRequestBuilder reindexAndPartiallyBlock() throws Exception {
assertThat(ALLOWED_OPERATIONS.drainPermits(), equalTo(0));
ReindexRequestBuilder builder = new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source(INDEX_NAME)
- .destination("target_index", "_doc");
+ .destination("target_index");
// Scroll by 1 so that cancellation is easier to control
builder.source().setSize(1);
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java
index 07d67290d8f2f..43adffc6f7671 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java
@@ -60,7 +60,6 @@
import org.opensearch.index.mapper.IndexFieldMapper;
import org.opensearch.index.mapper.RoutingFieldMapper;
import org.opensearch.index.mapper.SourceFieldMapper;
-import org.opensearch.index.mapper.TypeFieldMapper;
import org.opensearch.index.mapper.VersionFieldMapper;
import org.opensearch.index.reindex.ScrollableHitSource.SearchFailure;
import org.opensearch.script.Script;
@@ -249,7 +248,7 @@ protected boolean accept(ScrollableHitSource.Hit doc) {
* change the "fields" part of the search request it is unlikely that we got here because we didn't fetch _source.
* Thus the error message assumes that it wasn't stored.
*/
- throw new IllegalArgumentException("[" + doc.getIndex() + "][" + doc.getType() + "][" + doc.getId() + "] didn't store _source");
+ throw new IllegalArgumentException("[" + doc.getIndex() + "][" + doc.getId() + "] didn't store _source");
}
return true;
}
@@ -597,10 +596,6 @@ public interface RequestWrapper<Self extends DocWriteRequest<Self>> {
String getIndex();
- void setType(String type);
-
- String getType();
-
void setId(String id);
String getId();
@@ -643,16 +638,6 @@ public String getIndex() {
return request.index();
}
- @Override
- public void setType(String type) {
- request.type(type);
- }
-
- @Override
- public String getType() {
- return request.type();
- }
-
@Override
public void setId(String id) {
request.id(id);
@@ -732,16 +717,6 @@ public String getIndex() {
return request.index();
}
- @Override
- public void setType(String type) {
- request.type(type);
- }
-
- @Override
- public String getType() {
- return request.type();
- }
-
@Override
public void setId(String id) {
request.id(id);
@@ -831,7 +806,6 @@ public RequestWrapper<?> apply(RequestWrapper<?> request, ScrollableHitSource.Hi
Map<String, Object> context = new HashMap<>();
context.put(IndexFieldMapper.NAME, doc.getIndex());
- context.put(TypeFieldMapper.NAME, doc.getType());
context.put(IdFieldMapper.NAME, doc.getId());
Long oldVersion = doc.getVersion();
context.put(VersionFieldMapper.NAME, oldVersion);
@@ -861,10 +835,6 @@ public RequestWrapper<?> apply(RequestWrapper<?> request, ScrollableHitSource.Hi
if (false == doc.getIndex().equals(newValue)) {
scriptChangedIndex(request, newValue);
}
- newValue = context.remove(TypeFieldMapper.NAME);
- if (false == doc.getType().equals(newValue)) {
- scriptChangedType(request, newValue);
- }
newValue = context.remove(IdFieldMapper.NAME);
if (false == doc.getId().equals(newValue)) {
scriptChangedId(request, newValue);
@@ -899,7 +869,7 @@ protected RequestWrapper<?> scriptChangedOpType(RequestWrapper<?> request, OpTyp
taskWorker.countNoop();
return null;
case DELETE:
- RequestWrapper<DeleteRequest> delete = wrap(new DeleteRequest(request.getIndex(), request.getType(), request.getId()));
+ RequestWrapper<DeleteRequest> delete = wrap(new DeleteRequest(request.getIndex(), request.getId()));
delete.setVersion(request.getVersion());
delete.setVersionType(VersionType.INTERNAL);
delete.setRouting(request.getRouting());
@@ -911,8 +881,6 @@ protected RequestWrapper<?> scriptChangedOpType(RequestWrapper<?> request, OpTyp
protected abstract void scriptChangedIndex(RequestWrapper<?> request, Object to);
- protected abstract void scriptChangedType(RequestWrapper<?> request, Object to);
-
protected abstract void scriptChangedId(RequestWrapper<?> request, Object to);
protected abstract void scriptChangedVersion(RequestWrapper<?> request, Object to);
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java
index ac1a7c22a4d2f..1a9ce16acc255 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java
@@ -67,7 +67,6 @@ protected boolean accept(ScrollableHitSource.Hit doc) {
protected RequestWrapper<DeleteRequest> buildRequest(ScrollableHitSource.Hit doc) {
DeleteRequest delete = new DeleteRequest();
delete.index(doc.getIndex());
- delete.type(doc.getType());
delete.id(doc.getId());
delete.setIfSeqNo(doc.getSeqNo());
delete.setIfPrimaryTerm(doc.getPrimaryTerm());
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java
index 0037e1d06a115..8ade055d10f60 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java
@@ -352,13 +352,6 @@ protected RequestWrapper<IndexRequest> buildRequest(ScrollableHitSource.Hit doc)
// Copy the index from the request so we always write where it asked to write
index.index(mainRequest.getDestination().index());
- // If the request override's type then the user wants all documents in that type. Otherwise keep the doc's type.
- if (mainRequest.getDestination().type() == null) {
- index.type(doc.getType());
- } else {
- index.type(mainRequest.getDestination().type());
- }
-
/*
* Internal versioning can just use what we copied from the destination request. Otherwise we assume we're using external
* versioning and use the doc's version.
@@ -460,12 +453,6 @@ protected void scriptChangedIndex(RequestWrapper<?> request, Object to) {
request.setIndex(to.toString());
}
- @Override
- protected void scriptChangedType(RequestWrapper<?> request, Object to) {
- requireNonNull(to, "Can't reindex without a destination type!");
- request.setType(to.toString());
- }
-
@Override
protected void scriptChangedId(RequestWrapper<?> request, Object to) {
request.setId(Objects.toString(to, null));
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java
index 25fd1a250d362..f07915b9d9e76 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java
@@ -46,7 +46,6 @@
import org.opensearch.index.mapper.IdFieldMapper;
import org.opensearch.index.mapper.IndexFieldMapper;
import org.opensearch.index.mapper.RoutingFieldMapper;
-import org.opensearch.index.mapper.TypeFieldMapper;
import org.opensearch.script.Script;
import org.opensearch.script.ScriptService;
import org.opensearch.tasks.Task;
@@ -138,7 +137,6 @@ public BiFunction<RequestWrapper<?>, ScrollableHitSource.Hit, RequestWrapper<?>>
protected RequestWrapper<IndexRequest> buildRequest(ScrollableHitSource.Hit doc) {
IndexRequest index = new IndexRequest();
index.index(doc.getIndex());
- index.type(doc.getType());
index.id(doc.getId());
index.source(doc.getSource(), doc.getXContentType());
index.setIfSeqNo(doc.getSeqNo());
@@ -163,11 +161,6 @@ protected void scriptChangedIndex(RequestWrapper<?> request, Object to) {
throw new IllegalArgumentException("Modifying [" + IndexFieldMapper.NAME + "] not allowed");
}
- @Override
- protected void scriptChangedType(RequestWrapper<?> request, Object to) {
- throw new IllegalArgumentException("Modifying [" + TypeFieldMapper.NAME + "] not allowed");
- }
-
@Override
protected void scriptChangedId(RequestWrapper<?> request, Object to) {
throw new IllegalArgumentException("Modifying [" + IdFieldMapper.NAME + "] not allowed");
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java
index 69ec2e8b852cb..8467fbdeacd0e 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java
@@ -79,7 +79,6 @@ static Request initialSearch(SearchRequest searchRequest, BytesReference query,
// It is nasty to build paths with StringBuilder but we'll be careful....
StringBuilder path = new StringBuilder("/");
addIndices(path, searchRequest.indices());
- addTypes(path, searchRequest.types());
path.append("_search");
Request request = new Request("POST", path.toString());
@@ -210,16 +209,6 @@ private static String encodeIndex(String s) {
}
}
- private static void addTypes(StringBuilder path, String[] types) {
- if (types == null || types.length == 0) {
- return;
- }
- for (String indexOrType : types) {
- checkIndexOrType("Type", indexOrType);
- }
- path.append(Strings.arrayToCommaDelimitedString(types)).append('/');
- }
-
private static void checkIndexOrType(String name, String indexOrType) {
if (indexOrType.indexOf(',') >= 0) {
throw new IllegalArgumentException(name + " containing [,] not supported but got [" + indexOrType + "]");
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java
index 4c57872462f0b..d22b995036e90 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java
@@ -75,14 +75,12 @@ private RemoteResponseParsers() {}
public static final ConstructingObjectParser<BasicHit, XContentType> HIT_PARSER = new ConstructingObjectParser<>("hit", true, a -> {
int i = 0;
String index = (String) a[i++];
- String type = (String) a[i++];
String id = (String) a[i++];
Long version = (Long) a[i++];
- return new BasicHit(index, type, id, version == null ? -1 : version);
+ return new BasicHit(index, id, version == null ? -1 : version);
});
static {
HIT_PARSER.declareString(constructorArg(), new ParseField("_index"));
- HIT_PARSER.declareString(constructorArg(), new ParseField("_type"));
HIT_PARSER.declareString(constructorArg(), new ParseField("_id"));
HIT_PARSER.declareLong(optionalConstructorArg(), new ParseField("_version"));
HIT_PARSER.declareObject(((basicHit, tuple) -> basicHit.setSource(tuple.v1(), tuple.v2())), (p, s) -> {
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java
index a71381d968ca8..003f3b0824602 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java
@@ -37,7 +37,7 @@ public abstract class AbstractAsyncBulkByScrollActionMetadataTestCase<
Response extends BulkByScrollResponse> extends AbstractAsyncBulkByScrollActionTestCase<Request, Response> {
protected ScrollableHitSource.BasicHit doc() {
- return new ScrollableHitSource.BasicHit("index", "type", "id", 0);
+ return new ScrollableHitSource.BasicHit("index", "id", 0);
}
protected abstract AbstractAsyncBulkByScrollAction action();
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java
index 3c19edc89c865..671faef6c5545 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java
@@ -65,8 +65,8 @@ public void setupScriptService() {
@SuppressWarnings("unchecked")
protected <T extends ActionRequest> T applyScript(Consumer<Map<String, Object>> scriptBody) {
- IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar"));
- ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0);
+ IndexRequest index = new IndexRequest("index").id("1").source(singletonMap("foo", "bar"));
+ ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "id", 0);
UpdateScript.Factory factory = (params, ctx) -> new UpdateScript(Collections.emptyMap(), ctx) {
@Override
public void execute() {
@@ -79,11 +79,6 @@ public void execute() {
return (result != null) ? (T) result.self() : null;
}
- public void testTypeDeprecation() {
- applyScript((Map<String, Object> ctx) -> ctx.get("_type"));
- assertWarnings("[types removal] Looking up doc types [_type] in scripts is deprecated.");
- }
-
public void testScriptAddingJunkToCtxIsError() {
try {
applyScript((Map<String, Object> ctx) -> ctx.put("junk", "junk"));
@@ -102,16 +97,9 @@ public void testChangeSource() {
assertEquals("cat", index.sourceAsMap().get("bar"));
}
- public void testSetOpTypeNoop() throws Exception {
- assertThat(task.getStatus().getNoops(), equalTo(0L));
- assertNull(applyScript((Map<String, Object> ctx) -> ctx.put("op", OpType.NOOP.toString())));
- assertThat(task.getStatus().getNoops(), equalTo(1L));
- }
-
public void testSetOpTypeDelete() throws Exception {
DeleteRequest delete = applyScript((Map<String, Object> ctx) -> ctx.put("op", OpType.DELETE.toString()));
assertThat(delete.index(), equalTo("index"));
- assertThat(delete.type(), equalTo("type"));
assertThat(delete.id(), equalTo("1"));
}
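
For reference, a minimal sketch of the typeless IndexRequest and BasicHit constructions used by the rewritten applyScript(...) fixture above; the index name, id, and source values are hypothetical placeholders.

import static java.util.Collections.singletonMap;

import org.opensearch.action.index.IndexRequest;
import org.opensearch.index.reindex.ScrollableHitSource;

public class TypelessReindexFixtureSketch {
    public static IndexRequest indexRequest() {
        // index(name).id(id) replaces the old (index, type, id) constructor.
        return new IndexRequest("index").id("1").source(singletonMap("foo", "bar"));
    }

    public static ScrollableHitSource.Hit scrollHit() {
        // BasicHit now takes (index, id, version) with no type argument.
        return new ScrollableHitSource.BasicHit("test", "id", 0);
    }
}
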
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java
index 1dd758150c392..9c2e44f580628 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java
@@ -73,7 +73,6 @@
import org.opensearch.common.CheckedConsumer;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.text.Text;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.AbstractRunnable;
import org.opensearch.common.util.concurrent.OpenSearchRejectedExecutionException;
@@ -288,7 +287,7 @@ public void testScrollResponseSetsTotal() {
public void testScrollResponseBatchingBehavior() throws Exception {
int maxBatches = randomIntBetween(0, 100);
for (int batches = 1; batches < maxBatches; batches++) {
- Hit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0);
+ Hit hit = new ScrollableHitSource.BasicHit("index", "id", 0);
ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null);
DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction();
simulateScrollResponse(action, System.nanoTime(), 0, response);
@@ -315,7 +314,7 @@ public void testBulkResponseSetsLotsOfStatus() throws Exception {
responses[i] = new BulkItemResponse(
i,
randomFrom(DocWriteRequest.OpType.values()),
- new Failure(shardId.getIndexName(), "type", "id" + i, new VersionConflictEngineException(shardId, "id", "test"))
+ new Failure(shardId.getIndexName(), "id" + i, new VersionConflictEngineException(shardId, "id", "test"))
);
continue;
}
@@ -342,15 +341,7 @@ public void testBulkResponseSetsLotsOfStatus() throws Exception {
}
final int seqNo = randomInt(20);
final int primaryTerm = randomIntBetween(1, 16);
- final IndexResponse response = new IndexResponse(
- shardId,
- "type",
- "id" + i,
- seqNo,
- primaryTerm,
- randomInt(),
- createdResponse
- );
+ final IndexResponse response = new IndexResponse(shardId, "id" + i, seqNo, primaryTerm, randomInt(), createdResponse);
responses[i] = new BulkItemResponse(i, opType, response);
}
assertExactlyOnce(onSuccess -> new DummyAsyncBulkByScrollAction().onBulkResponse(new BulkResponse(responses, 0), onSuccess));
@@ -433,7 +424,7 @@ public void testSearchTimeoutsAbortRequest() throws Exception {
* Mimicks bulk indexing failures.
*/
public void testBulkFailuresAbortRequest() throws Exception {
- Failure failure = new Failure("index", "type", "id", new RuntimeException("test"));
+ Failure failure = new Failure("index", "id", new RuntimeException("test"));
DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction();
BulkResponse bulkResponse = new BulkResponse(
new BulkItemResponse[] { new BulkItemResponse(0, DocWriteRequest.OpType.CREATE, failure) },
@@ -456,7 +447,7 @@ protected AbstractAsyncBulkByScrollAction.RequestWrapper<?> buildRequest(Hit doc
throw new RuntimeException("surprise");
}
};
- ScrollableHitSource.BasicHit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0);
+ ScrollableHitSource.BasicHit hit = new ScrollableHitSource.BasicHit("index", "id", 0);
hit.setSource(new BytesArray("{}"), XContentType.JSON);
ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null);
simulateScrollResponse(action, System.nanoTime(), 0, response);
@@ -541,7 +532,7 @@ protected RequestWrapper<?> buildRequest(Hit doc) {
action.start();
// create a simulated response.
- SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
+ SearchHit hit = new SearchHit(0, "id", emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
SearchHits hits = new SearchHits(
IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new),
new TotalHits(0, TotalHits.Relation.EQUAL_TO),
@@ -597,7 +588,7 @@ private void bulkRetryTestCase(boolean failWithRejection) throws Exception {
DummyAsyncBulkByScrollAction action = new DummyActionWithoutBackoff();
BulkRequest request = new BulkRequest();
for (int i = 0; i < size + 1; i++) {
- request.add(new IndexRequest("index", "type", "id" + i));
+ request.add(new IndexRequest("index").id("id" + i));
}
if (failWithRejection) {
action.sendBulkRequest(request, Assert::fail);
@@ -946,7 +937,6 @@ protected <Request extends ActionRequest, Response extends ActionResponse> void
IndexRequest index = (IndexRequest) item;
response = new IndexResponse(
shardId,
- index.type(),
index.id() == null ? "dummy_id" : index.id(),
randomInt(20),
randomIntBetween(1, 16),
@@ -957,7 +947,6 @@ protected <Request extends ActionRequest, Response extends ActionResponse> void
UpdateRequest update = (UpdateRequest) item;
response = new UpdateResponse(
shardId,
- update.type(),
update.id(),
randomNonNegativeLong(),
randomIntBetween(1, Integer.MAX_VALUE),
@@ -968,7 +957,6 @@ protected <Request extends ActionRequest, Response extends ActionResponse> void
DeleteRequest delete = (DeleteRequest) item;
response = new DeleteResponse(
shardId,
- delete.type(),
delete.id(),
randomInt(20),
randomIntBetween(1, 16),
@@ -982,12 +970,7 @@ protected <Request extends ActionRequest, Response extends ActionResponse> void
responses[i] = new BulkItemResponse(
i,
item.opType(),
- new Failure(
- response.getIndex(),
- response.getType(),
- response.getId(),
- new OpenSearchRejectedExecutionException()
- )
+ new Failure(response.getIndex(), response.getId(), new OpenSearchRejectedExecutionException())
);
} else {
responses[i] = new BulkItemResponse(i, item.opType(), response);
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseTests.java
index d2cb565547875..cd0ee066aec7f 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseTests.java
@@ -80,7 +80,7 @@ public void testMergeConstructor() {
List<BulkItemResponse.Failure> bulkFailures = frequently()
? emptyList()
: IntStream.range(0, between(1, 3))
- .mapToObj(j -> new BulkItemResponse.Failure("idx", "type", "id", new Exception()))
+ .mapToObj(j -> new BulkItemResponse.Failure("idx", "id", new Exception()))
.collect(Collectors.toList());
allBulkFailures.addAll(bulkFailures);
List<SearchFailure> searchFailures = frequently()
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java
index 1bab1db908ca9..bd43f05225f65 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java
@@ -77,7 +77,6 @@
public class CancelTests extends ReindexTestCase {
protected static final String INDEX = "reindex-cancel-index";
- protected static final String TYPE = "reindex-cancel-type";
// Semaphore used to allow & block indexing operations during the test
private static final Semaphore ALLOWED_OPERATIONS = new Semaphore(0);
@@ -116,7 +115,7 @@ private void testCancel(
false,
true,
IntStream.range(0, numDocs)
- .mapToObj(i -> client().prepareIndex(INDEX, TYPE, String.valueOf(i)).setSource("n", i))
+ .mapToObj(i -> client().prepareIndex().setIndex(INDEX).setId(String.valueOf(i)).setSource("n", i))
.collect(Collectors.toList())
);
@@ -247,12 +246,12 @@ public static TaskInfo findTaskToCancel(String actionName, int workerCount) {
}
public void testReindexCancel() throws Exception {
- testCancel(ReindexAction.NAME, reindex().source(INDEX).destination("dest", TYPE), (response, total, modified) -> {
+ testCancel(ReindexAction.NAME, reindex().source(INDEX).destination("dest"), (response, total, modified) -> {
assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")));
refresh("dest");
- assertHitCount(client().prepareSearch("dest").setTypes(TYPE).setSize(0).get(), modified);
- }, equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]"));
+ assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified);
+ }, equalTo("reindex from [" + INDEX + "] to [dest]"));
}
public void testUpdateByQueryCancel() throws Exception {
@@ -289,13 +288,13 @@ public void testDeleteByQueryCancel() throws Exception {
public void testReindexCancelWithWorkers() throws Exception {
testCancel(
ReindexAction.NAME,
- reindex().source(INDEX).filter(QueryBuilders.matchAllQuery()).destination("dest", TYPE).setSlices(5),
+ reindex().source(INDEX).filter(QueryBuilders.matchAllQuery()).destination("dest").setSlices(5),
(response, total, modified) -> {
assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5)));
refresh("dest");
- assertHitCount(client().prepareSearch("dest").setTypes(TYPE).setSize(0).get(), modified);
+ assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified);
},
- equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]")
+ equalTo("reindex from [" + INDEX + "] to [dest]")
);
}
@@ -355,16 +354,16 @@ public static class BlockingOperationListener implements IndexingOperationListen
@Override
public Engine.Index preIndex(ShardId shardId, Engine.Index index) {
- return preCheck(index, index.type());
+ return preCheck(index);
}
@Override
public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) {
- return preCheck(delete, delete.type());
+ return preCheck(delete);
}
- private <T extends Engine.Operation> T preCheck(T operation, String type) {
- if ((TYPE.equals(type) == false) || (operation.origin() != Origin.PRIMARY)) {
+ private <T extends Engine.Operation> T preCheck(T operation) {
+ if ((operation.origin() != Origin.PRIMARY)) {
return operation;
}
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java
index e0c8bf604ed27..8af217e5140e1 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java
@@ -47,7 +47,6 @@
import org.opensearch.client.support.AbstractClient;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.text.Text;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.OpenSearchRejectedExecutionException;
import org.opensearch.search.SearchHit;
@@ -183,7 +182,7 @@ public void testScrollKeepAlive() {
private SearchResponse createSearchResponse() {
// create a simulated response.
- SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
+ SearchHit hit = new SearchHit(0, "id", emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
SearchHits hits = new SearchHits(
IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new),
new TotalHits(0, TotalHits.Relation.EQUAL_TO),
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java
index 13ca95c01b72d..870f3620062f7 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java
@@ -83,25 +83,25 @@ public void testBasics() throws Exception {
client().prepareIndex("test", "test", "7").setSource("foo", "f")
);
- assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 7);
+ assertHitCount(client().prepareSearch("test").setSize(0).get(), 7);
// Deletes two docs that matches "foo:a"
assertThat(deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).get(), matcher().deleted(2));
- assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 5);
+ assertHitCount(client().prepareSearch("test").setSize(0).get(), 5);
// Deletes the two first docs with limit by size
DeleteByQueryRequestBuilder request = deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).size(2).refresh(true);
request.source().addSort("foo.keyword", SortOrder.ASC);
assertThat(request.get(), matcher().deleted(2));
- assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 3);
+ assertHitCount(client().prepareSearch("test").setSize(0).get(), 3);
// Deletes but match no docs
assertThat(deleteByQuery().source("test").filter(termQuery("foo", "no_match")).refresh(true).get(), matcher().deleted(0));
- assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 3);
+ assertHitCount(client().prepareSearch("test").setSize(0).get(), 3);
// Deletes all remaining docs
assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), matcher().deleted(3));
- assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 0);
+ assertHitCount(client().prepareSearch("test").setSize(0).get(), 0);
}
public void testDeleteByQueryWithOneIndex() throws Exception {
@@ -319,7 +319,7 @@ public void testSlices() throws Exception {
client().prepareIndex("test", "test", "6").setSource("foo", "e"),
client().prepareIndex("test", "test", "7").setSource("foo", "f")
);
- assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 7);
+ assertHitCount(client().prepareSearch("test").setSize(0).get(), 7);
int slices = randomSlices();
int expectedSlices = expectedSliceStatuses(slices, "test");
@@ -329,14 +329,14 @@ public void testSlices() throws Exception {
deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).setSlices(slices).get(),
matcher().deleted(2).slices(hasSize(expectedSlices))
);
- assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 5);
+ assertHitCount(client().prepareSearch("test").setSize(0).get(), 5);
// Delete remaining docs
assertThat(
deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).setSlices(slices).get(),
matcher().deleted(5).slices(hasSize(expectedSlices))
);
- assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 0);
+ assertHitCount(client().prepareSearch("test").setSize(0).get(), 0);
}
public void testMultipleSources() throws Exception {
@@ -369,7 +369,7 @@ public void testMultipleSources() throws Exception {
);
for (String index : docs.keySet()) {
- assertHitCount(client().prepareSearch(index).setTypes("test").setSize(0).get(), 0);
+ assertHitCount(client().prepareSearch(index).setSize(0).get(), 0);
}
}
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java
index 581cb19b0dd8b..652e4d4d34fd5 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java
@@ -59,23 +59,23 @@ public void testFiltering() throws Exception {
assertHitCount(client().prepareSearch("source").setSize(0).get(), 4);
// Copy all the docs
- ReindexRequestBuilder copy = reindex().source("source").destination("dest", "type").refresh(true);
+ ReindexRequestBuilder copy = reindex().source("source").destination("dest").refresh(true);
assertThat(copy.get(), matcher().created(4));
assertHitCount(client().prepareSearch("dest").setSize(0).get(), 4);
// Now none of them
createIndex("none");
- copy = reindex().source("source").destination("none", "type").filter(termQuery("foo", "no_match")).refresh(true);
+ copy = reindex().source("source").destination("none").filter(termQuery("foo", "no_match")).refresh(true);
assertThat(copy.get(), matcher().created(0));
assertHitCount(client().prepareSearch("none").setSize(0).get(), 0);
// Now half of them
- copy = reindex().source("source").destination("dest_half", "type").filter(termQuery("foo", "a")).refresh(true);
+ copy = reindex().source("source").destination("dest_half").filter(termQuery("foo", "a")).refresh(true);
assertThat(copy.get(), matcher().created(2));
assertHitCount(client().prepareSearch("dest_half").setSize(0).get(), 2);
// Limit with maxDocs
- copy = reindex().source("source").destination("dest_size_one", "type").maxDocs(1).refresh(true);
+ copy = reindex().source("source").destination("dest_size_one").maxDocs(1).refresh(true);
assertThat(copy.get(), matcher().created(1));
assertHitCount(client().prepareSearch("dest_size_one").setSize(0).get(), 1);
}
@@ -91,7 +91,7 @@ public void testCopyMany() throws Exception {
assertHitCount(client().prepareSearch("source").setSize(0).get(), max);
// Copy all the docs
- ReindexRequestBuilder copy = reindex().source("source").destination("dest", "type").refresh(true);
+ ReindexRequestBuilder copy = reindex().source("source").destination("dest").refresh(true);
// Use a small batch size so we have to use more than one batch
copy.source().setSize(5);
assertThat(copy.get(), matcher().created(max).batches(max, 5));
@@ -99,7 +99,7 @@ public void testCopyMany() throws Exception {
// Copy some of the docs
int half = max / 2;
- copy = reindex().source("source").destination("dest_half", "type").refresh(true);
+ copy = reindex().source("source").destination("dest_half").refresh(true);
// Use a small batch size so we have to use more than one batch
copy.source().setSize(5);
copy.maxDocs(half);
@@ -121,15 +121,15 @@ public void testCopyManyWithSlices() throws Exception {
int expectedSlices = expectedSliceStatuses(slices, "source");
// Copy all the docs
- ReindexRequestBuilder copy = reindex().source("source").destination("dest", "type").refresh(true).setSlices(slices);
+ ReindexRequestBuilder copy = reindex().source("source").destination("dest").refresh(true).setSlices(slices);
// Use a small batch size so we have to use more than one batch
copy.source().setSize(5);
assertThat(copy.get(), matcher().created(max).batches(greaterThanOrEqualTo(max / 5)).slices(hasSize(expectedSlices)));
- assertHitCount(client().prepareSearch("dest").setTypes("type").setSize(0).get(), max);
+ assertHitCount(client().prepareSearch("dest").setSize(0).get(), max);
// Copy some of the docs
int half = max / 2;
- copy = reindex().source("source").destination("dest_half", "type").refresh(true).setSlices(slices);
+ copy = reindex().source("source").destination("dest_half").refresh(true).setSlices(slices);
// Use a small batch size so we have to use more than one batch
copy.source().setSize(5);
copy.maxDocs(half);
@@ -162,7 +162,7 @@ public void testMultipleSources() throws Exception {
int expectedSlices = expectedSliceStatuses(slices, docs.keySet());
String[] sourceIndexNames = docs.keySet().toArray(new String[docs.size()]);
- ReindexRequestBuilder request = reindex().source(sourceIndexNames).destination("dest", "type").refresh(true).setSlices(slices);
+ ReindexRequestBuilder request = reindex().source(sourceIndexNames).destination("dest").refresh(true).setSlices(slices);
BulkByScrollResponse response = request.get();
assertThat(response, matcher().created(allDocs.size()).slices(hasSize(expectedSlices)));
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java
index bd6eba132af21..85f0c3c24abee 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java
@@ -60,20 +60,6 @@ public void testSettingIndexToNullIsError() throws Exception {
}
}
- public void testSetType() throws Exception {
- Object type = randomFrom(new Object[] { 234, 234L, "pancake" });
- IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_type", type));
- assertEquals(type.toString(), index.type());
- }
-
- public void testSettingTypeToNullIsError() throws Exception {
- try {
- applyScript((Map<String, Object> ctx) -> ctx.put("_type", null));
- } catch (NullPointerException e) {
- assertThat(e.getMessage(), containsString("Can't reindex without a destination type!"));
- }
- }
-
public void testSetId() throws Exception {
Object id = randomFrom(new Object[] { null, 234, 234L, "pancake" });
IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_id", id));
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java
index 7181fa9f4d273..e516be131e6a4 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java
@@ -130,7 +130,7 @@ private void setupSourceAbsent() throws Exception {
client().prepareIndex("source", "_doc", "test").setVersionType(EXTERNAL).setVersion(SOURCE_VERSION).setSource("foo", "source")
);
- assertEquals(SOURCE_VERSION, client().prepareGet("source", "_doc", "test").get().getVersion());
+ assertEquals(SOURCE_VERSION, client().prepareGet("source", "test").get().getVersion());
}
private void setupDest(int version) throws Exception {
@@ -140,7 +140,7 @@ private void setupDest(int version) throws Exception {
client().prepareIndex("dest", "_doc", "test").setVersionType(EXTERNAL).setVersion(version).setSource("foo", "dest")
);
- assertEquals(version, client().prepareGet("dest", "_doc", "test").get().getVersion());
+ assertEquals(version, client().prepareGet("dest", "test").get().getVersion());
}
private void setupDestOlder() throws Exception {
@@ -152,7 +152,7 @@ private void setupDestNewer() throws Exception {
}
private void assertDest(String fooValue, int version) {
- GetResponse get = client().prepareGet("dest", "_doc", "test").get();
+ GetResponse get = client().prepareGet("dest", "test").get();
assertEquals(fooValue, get.getSource().get("foo"));
assertEquals(version, get.getVersion());
}
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java
index 5341bcd0fee5d..11e1f6b478fe3 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java
@@ -33,8 +33,6 @@
package org.opensearch.index.reindex;
import org.opensearch.common.xcontent.NamedXContentRegistry;
-import org.opensearch.rest.RestRequest;
-import org.opensearch.rest.action.search.RestSearchAction;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
@@ -52,26 +50,6 @@ public void setUpAction() {
controller().registerHandler(action);
}
- public void testTypeInPath() throws IOException {
- RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST)
- .withPath("/some_index/some_type/_delete_by_query")
- .build();
-
- // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
- verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null);
-
- dispatchRequest(request);
-
- // checks the type in the URL is propagated correctly to the request object
- // only works after the request is dispatched, so its params are filled from url.
- DeleteByQueryRequest dbqRequest = action.buildRequest(request, DEFAULT_NAMED_WRITABLE_REGISTRY);
- assertArrayEquals(new String[] { "some_type" }, dbqRequest.getDocTypes());
-
- // RestDeleteByQueryAction itself doesn't check for a deprecated type usage
- // checking here for a deprecation from its internal search request
- assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
- }
-
public void testParseEmpty() throws IOException {
final FakeRestRequest restRequest = new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build();
DeleteByQueryRequest request = action.buildRequest(restRequest, DEFAULT_NAMED_WRITABLE_REGISTRY);
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java
index 508cfefa1679c..aa8221b045d3f 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java
@@ -38,13 +38,11 @@
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.common.xcontent.json.JsonXContent;
-import org.opensearch.rest.RestRequest.Method;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
import java.io.IOException;
-import java.util.Arrays;
import java.util.Collections;
import static java.util.Collections.singletonMap;
@@ -102,52 +100,4 @@ public void testSetScrollTimeout() throws IOException {
assertEquals("10m", request.getScrollTime().toString());
}
}
-
- /**
- * test deprecation is logged if one or more types are used in source search request inside reindex
- */
- public void testTypeInSource() throws IOException {
- FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST)
- .withPath("/_reindex");
- XContentBuilder b = JsonXContent.contentBuilder().startObject();
- {
- b.startObject("source");
- {
- b.field("type", randomFrom(Arrays.asList("\"t1\"", "[\"t1\", \"t2\"]", "\"_doc\"")));
- }
- b.endObject();
- }
- b.endObject();
- requestBuilder.withContent(new BytesArray(BytesReference.bytes(b).toBytesRef()), XContentType.JSON);
-
- // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
- verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null);
-
- dispatchRequest(requestBuilder.build());
- assertWarnings(ReindexRequest.TYPES_DEPRECATION_MESSAGE);
- }
-
- /**
- * test deprecation is logged if a type is used in the destination index request inside reindex
- */
- public void testTypeInDestination() throws IOException {
- FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST)
- .withPath("/_reindex");
- XContentBuilder b = JsonXContent.contentBuilder().startObject();
- {
- b.startObject("dest");
- {
- b.field("type", (randomBoolean() ? "_doc" : randomAlphaOfLength(4)));
- }
- b.endObject();
- }
- b.endObject();
- requestBuilder.withContent(new BytesArray(BytesReference.bytes(b).toBytesRef()), XContentType.JSON);
-
- // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
- verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null);
-
- dispatchRequest(requestBuilder.build());
- assertWarnings(ReindexRequest.TYPES_DEPRECATION_MESSAGE);
- }
}
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java
index 743f0e8a852f4..ef5a94f2e1798 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java
@@ -33,8 +33,6 @@
package org.opensearch.index.reindex;
import org.opensearch.common.xcontent.NamedXContentRegistry;
-import org.opensearch.rest.RestRequest;
-import org.opensearch.rest.action.search.RestSearchAction;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
@@ -53,26 +51,6 @@ public void setUpAction() {
controller().registerHandler(action);
}
- public void testTypeInPath() throws IOException {
- RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST)
- .withPath("/some_index/some_type/_update_by_query")
- .build();
-
- // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
- verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null);
-
- dispatchRequest(request);
-
- // checks the type in the URL is propagated correctly to the request object
- // only works after the request is dispatched, so its params are filled from url.
- UpdateByQueryRequest ubqRequest = action.buildRequest(request, DEFAULT_NAMED_WRITABLE_REGISTRY);
- assertArrayEquals(new String[] { "some_type" }, ubqRequest.getDocTypes());
-
- // RestUpdateByQueryAction itself doesn't check for a deprecated type usage
- // checking here for a deprecation from its internal search request
- assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
- }
-
public void testParseEmpty() throws IOException {
final FakeRestRequest restRequest = new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build();
UpdateByQueryRequest request = action.buildRequest(restRequest, DEFAULT_NAMED_WRITABLE_REGISTRY);
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java
index d803eff25d081..3ed1f7b563546 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java
@@ -55,36 +55,36 @@ public void testBasics() throws Exception {
client().prepareIndex("test", "test", "3").setSource("foo", "b"),
client().prepareIndex("test", "test", "4").setSource("foo", "c")
);
- assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 4);
- assertEquals(1, client().prepareGet("test", "test", "1").get().getVersion());
- assertEquals(1, client().prepareGet("test", "test", "4").get().getVersion());
+ assertHitCount(client().prepareSearch("test").setSize(0).get(), 4);
+ assertEquals(1, client().prepareGet("test", "1").get().getVersion());
+ assertEquals(1, client().prepareGet("test", "4").get().getVersion());
// Reindex all the docs
assertThat(updateByQuery().source("test").refresh(true).get(), matcher().updated(4));
- assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion());
- assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "1").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "4").get().getVersion());
// Now none of them
assertThat(updateByQuery().source("test").filter(termQuery("foo", "no_match")).refresh(true).get(), matcher().updated(0));
- assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion());
- assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "1").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "4").get().getVersion());
// Now half of them
assertThat(updateByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).get(), matcher().updated(2));
- assertEquals(3, client().prepareGet("test", "test", "1").get().getVersion());
- assertEquals(3, client().prepareGet("test", "test", "2").get().getVersion());
- assertEquals(2, client().prepareGet("test", "test", "3").get().getVersion());
- assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion());
+ assertEquals(3, client().prepareGet("test", "1").get().getVersion());
+ assertEquals(3, client().prepareGet("test", "2").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "3").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "4").get().getVersion());
// Limit with size
UpdateByQueryRequestBuilder request = updateByQuery().source("test").size(3).refresh(true);
request.source().addSort("foo.keyword", SortOrder.ASC);
assertThat(request.get(), matcher().updated(3));
// Only the first three documents are updated because of sort
- assertEquals(4, client().prepareGet("test", "test", "1").get().getVersion());
- assertEquals(4, client().prepareGet("test", "test", "2").get().getVersion());
- assertEquals(3, client().prepareGet("test", "test", "3").get().getVersion());
- assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion());
+ assertEquals(4, client().prepareGet("test", "1").get().getVersion());
+ assertEquals(4, client().prepareGet("test", "2").get().getVersion());
+ assertEquals(3, client().prepareGet("test", "3").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "4").get().getVersion());
}
public void testSlices() throws Exception {
@@ -95,9 +95,9 @@ public void testSlices() throws Exception {
client().prepareIndex("test", "test", "3").setSource("foo", "b"),
client().prepareIndex("test", "test", "4").setSource("foo", "c")
);
- assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 4);
- assertEquals(1, client().prepareGet("test", "test", "1").get().getVersion());
- assertEquals(1, client().prepareGet("test", "test", "4").get().getVersion());
+ assertHitCount(client().prepareSearch("test").setSize(0).get(), 4);
+ assertEquals(1, client().prepareGet("test", "1").get().getVersion());
+ assertEquals(1, client().prepareGet("test", "4").get().getVersion());
int slices = randomSlices(2, 10);
int expectedSlices = expectedSliceStatuses(slices, "test");
@@ -107,26 +107,26 @@ public void testSlices() throws Exception {
updateByQuery().source("test").refresh(true).setSlices(slices).get(),
matcher().updated(4).slices(hasSize(expectedSlices))
);
- assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion());
- assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "1").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "4").get().getVersion());
// Now none of them
assertThat(
updateByQuery().source("test").filter(termQuery("foo", "no_match")).setSlices(slices).refresh(true).get(),
matcher().updated(0).slices(hasSize(expectedSlices))
);
- assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion());
- assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "1").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "4").get().getVersion());
// Now half of them
assertThat(
updateByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).setSlices(slices).get(),
matcher().updated(2).slices(hasSize(expectedSlices))
);
- assertEquals(3, client().prepareGet("test", "test", "1").get().getVersion());
- assertEquals(3, client().prepareGet("test", "test", "2").get().getVersion());
- assertEquals(2, client().prepareGet("test", "test", "3").get().getVersion());
- assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion());
+ assertEquals(3, client().prepareGet("test", "1").get().getVersion());
+ assertEquals(3, client().prepareGet("test", "2").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "3").get().getVersion());
+ assertEquals(2, client().prepareGet("test", "4").get().getVersion());
}
public void testMultipleSources() throws Exception {
@@ -159,7 +159,7 @@ public void testMultipleSources() throws Exception {
String index = entry.getKey();
List<IndexRequestBuilder> indexDocs = entry.getValue();
int randomDoc = between(0, indexDocs.size() - 1);
- assertEquals(2, client().prepareGet(index, "test", Integer.toString(randomDoc)).get().getVersion());
+ assertEquals(2, client().prepareGet(index, Integer.toString(randomDoc)).get().getVersion());
}
}
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java
index 3685fc5f124c9..3e4c61432c34a 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java
@@ -76,7 +76,7 @@ public void testUpdateWhileReindexing() throws Exception {
try {
for (int i = 0; i < MAX_MUTATIONS; i++) {
- GetResponse get = client().prepareGet("test", "test", "test").get();
+ GetResponse get = client().prepareGet("test", "test").get();
assertEquals(value.get(), get.getSource().get("test"));
value.set(randomSimpleString(random()));
IndexRequestBuilder index = client().prepareIndex("test", "test", "test")
@@ -106,7 +106,7 @@ public void testUpdateWhileReindexing() throws Exception {
get.getVersion(),
attempts
);
- get = client().prepareGet("test", "test", "test").get();
+ get = client().prepareGet("test", "test").get();
}
}
}
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java
index b72f66ce11277..ce982dcb6bd34 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java
@@ -56,7 +56,7 @@ public void testModifyingCtxNotAllowed() {
* error message to the user, not some ClassCastException.
*/
Object[] options = new Object[] { "cat", new Object(), 123, new Date(), Math.PI };
- for (String ctxVar : new String[] { "_index", "_type", "_id", "_version", "_routing" }) {
+ for (String ctxVar : new String[] { "_index", "_id", "_version", "_routing" }) {
try {
applyScript((Map<String, Object> ctx) -> ctx.put(ctxVar, randomFrom(options)));
} catch (IllegalArgumentException e) {
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java
index 541134f9403ba..c349bc54bcbd9 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java
@@ -78,27 +78,25 @@ public void testIntialSearchPath() {
SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder());
assertEquals("/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
searchRequest.indices("a");
- searchRequest.types("b");
- assertEquals("/a/b/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+ assertEquals("/a/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
searchRequest.indices("a", "b");
- searchRequest.types("c", "d");
- assertEquals("/a,b/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+ assertEquals("/a,b/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
searchRequest.indices("cat,");
- assertEquals("/cat%2C/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+ assertEquals("/cat%2C/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
searchRequest.indices("cat/");
- assertEquals("/cat%2F/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+ assertEquals("/cat%2F/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
searchRequest.indices("cat/", "dog");
- assertEquals("/cat%2F,dog/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+ assertEquals("/cat%2F,dog/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
// test a specific date math + all characters that need escaping.
searchRequest.indices("", "<>/{}|+:,");
assertEquals(
- "/%3Ccat%7Bnow%2Fd%7D%3E,%3C%3E%2F%7B%7D%7C%2B%3A%2C/c,d/_search",
+ "/%3Ccat%7Bnow%2Fd%7D%3E,%3C%3E%2F%7B%7D%7C%2B%3A%2C/_search",
initialSearch(searchRequest, query, remoteVersion).getEndpoint()
);
// pass-through if already escaped.
searchRequest.indices("%2f", "%3a");
- assertEquals("/%2f,%3a/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+ assertEquals("/%2f,%3a/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
assertWarnings(DEPRECATED_URL_ENCODED_INDEX_WARNING);
@@ -107,20 +105,6 @@ public void testIntialSearchPath() {
expectBadStartRequest(searchRequest, "Index", ",", "%2fcat,");
searchRequest.indices("%3ccat/");
expectBadStartRequest(searchRequest, "Index", "/", "%3ccat/");
-
- searchRequest.indices("ok");
- searchRequest.types("cat,");
- expectBadStartRequest(searchRequest, "Type", ",", "cat,");
- searchRequest.types("cat,", "dog");
- expectBadStartRequest(searchRequest, "Type", ",", "cat,");
- searchRequest.types("dog", "cat,");
- expectBadStartRequest(searchRequest, "Type", ",", "cat,");
- searchRequest.types("cat/");
- expectBadStartRequest(searchRequest, "Type", "/", "cat/");
- searchRequest.types("cat/", "dog");
- expectBadStartRequest(searchRequest, "Type", "/", "cat/");
- searchRequest.types("dog", "cat/");
- expectBadStartRequest(searchRequest, "Type", "/", "cat/");
}
private void expectBadStartRequest(SearchRequest searchRequest, String type, String bad, String failed) {
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java
index 54cb39c736ff8..337bc67796f8e 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java
@@ -178,7 +178,6 @@ public void testParseStartOk() throws Exception {
assertThat(r.getFailures(), empty());
assertThat(r.getHits(), hasSize(1));
assertEquals("test", r.getHits().get(0).getIndex());
- assertEquals("test", r.getHits().get(0).getType());
assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId());
assertEquals("{\"test\":\"test2\"}", r.getHits().get(0).getSource().utf8ToString());
assertNull(r.getHits().get(0).getRouting());
@@ -196,7 +195,6 @@ public void testParseScrollOk() throws Exception {
assertThat(r.getFailures(), empty());
assertThat(r.getHits(), hasSize(1));
assertEquals("test", r.getHits().get(0).getIndex());
- assertEquals("test", r.getHits().get(0).getType());
assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId());
assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString());
assertNull(r.getHits().get(0).getRouting());
@@ -246,7 +244,6 @@ public void testScanJumpStart() throws Exception {
assertThat(r.getFailures(), empty());
assertThat(r.getHits(), hasSize(1));
assertEquals("test", r.getHits().get(0).getIndex());
- assertEquals("test", r.getHits().get(0).getType());
assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId());
assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString());
assertNull(r.getHits().get(0).getRouting());
@@ -277,7 +274,6 @@ public void testParseRejection() throws Exception {
);
assertThat(r.getHits(), hasSize(1));
assertEquals("test", r.getHits().get(0).getIndex());
- assertEquals("test", r.getHits().get(0).getType());
assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId());
assertEquals("{\"test\":\"test1\"}", r.getHits().get(0).getSource().utf8ToString());
called.set(true);
@@ -308,7 +304,6 @@ public void testParseFailureWithStatus() throws Exception {
);
assertThat(r.getHits(), hasSize(1));
assertEquals("test", r.getHits().get(0).getIndex());
- assertEquals("test", r.getHits().get(0).getType());
assertEquals("10000", r.getHits().get(0).getId());
assertEquals("{\"test\":\"test10000\"}", r.getHits().get(0).getSource().utf8ToString());
called.set(true);
diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml
index c47d8ff0e0756..7783bbd1f9476 100644
--- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml
+++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml
@@ -91,7 +91,6 @@
- skip:
version: "6.7.0 - "
reason: reindex moved to rely on sequence numbers for concurrency control
-
- do:
indices.create:
index: test
@@ -124,7 +123,6 @@
- match: {version_conflicts: 1}
- match: {batches: 1}
- match: {failures.0.index: test}
- - match: {failures.0.type: _doc}
- match: {failures.0.id: "1"}
- match: {failures.0.status: 409}
- match: {failures.0.cause.type: version_conflict_engine_exception}
@@ -145,10 +143,6 @@
---
"Response for version conflict (seq no powered)":
- - skip:
- version: " - 6.6.99"
- reason: reindex moved to rely on sequence numbers for concurrency control
-
- do:
indices.create:
index: test
@@ -181,7 +175,6 @@
- match: {version_conflicts: 1}
- match: {batches: 1}
- match: {failures.0.index: test}
- - match: {failures.0.type: _doc}
- match: {failures.0.id: "1"}
- match: {failures.0.status: 409}
- match: {failures.0.cause.type: version_conflict_engine_exception}
@@ -210,7 +203,6 @@
- do:
index:
index: test
- type: _doc
id: 1
body: { "text": "test" }
- do:
@@ -219,7 +211,6 @@
- do:
index:
index: test
- type: _doc
id: 1
body: { "text": "test2" }
diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml
index b06cd2325571a..7086e048eba3e 100644
--- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml
+++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml
@@ -285,7 +285,7 @@
indices.refresh: {}
- do:
- catch: /\[test\]\[_doc\]\[1\] didn't store _source/
+ catch: /\[test\]\[1\] didn't store _source/
reindex:
body:
source:
diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml
index 770f372c210a8..9c38b13bb1ff0 100644
--- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml
+++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml
@@ -399,9 +399,9 @@
mget:
body:
docs:
- - { _index: index2, _type: _doc, _id: en_123}
- - { _index: index2, _type: _doc, _id: en_456}
- - { _index: index2, _type: _doc, _id: fr_789}
+ - { _index: index2, _id: en_123}
+ - { _index: index2, _id: en_456}
+ - { _index: index2, _id: fr_789}
- is_true: docs.0.found
- match: { docs.0._index: index2 }
diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml
index f17b59e5806fe..4df12b31a0bed 100644
--- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml
+++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml
@@ -104,7 +104,6 @@
- match: {version_conflicts: 1}
- match: {batches: 1}
- match: {failures.0.index: test}
- - match: {failures.0.type: _doc}
- match: {failures.0.id: "1"}
- match: {failures.0.status: 409}
- match: {failures.0.cause.type: version_conflict_engine_exception}
@@ -116,9 +115,6 @@
---
"Response for version conflict (seq no powered)":
- - skip:
- version: " - 6.6.99"
- reason: reindex moved to rely on sequence numbers for concurrency control
- do:
indices.create:
index: test
@@ -147,7 +143,6 @@
- match: {version_conflicts: 1}
- match: {batches: 1}
- match: {failures.0.index: test}
- - match: {failures.0.type: _doc}
- match: {failures.0.id: "1"}
- match: {failures.0.status: 409}
- match: {failures.0.cause.type: version_conflict_engine_exception}
@@ -167,7 +162,6 @@
- do:
index:
index: test
- type: _doc
id: 1
body: { "text": "test" }
- do:
@@ -176,7 +170,6 @@
- do:
index:
index: test
- type: _doc
id: 1
body: { "text": "test2" }
diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml
index c015b1a21c398..7b00fb59b02b2 100644
--- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml
+++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml
@@ -150,7 +150,7 @@
indices.refresh: {}
- do:
- catch: /\[test\]\[_doc\]\[1\] didn't store _source/
+ catch: /\[test\]\[1\] didn't store _source/
update_by_query:
index: test
diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle
index 2e62fdd697ec9..f0029837c7d03 100644
--- a/modules/transport-netty4/build.gradle
+++ b/modules/transport-netty4/build.gradle
@@ -149,10 +149,7 @@ thirdPartyAudit {
// from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty)
'org.bouncycastle.cert.X509v3CertificateBuilder',
'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter',
- 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder',
- 'org.bouncycastle.jce.provider.BouncyCastleProvider',
'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder',
- 'org.bouncycastle.asn1.x500.X500Name',
// from io.netty.handler.ssl.JettyNpnSslEngine (netty)
'org.eclipse.jetty.npn.NextProtoNego$ClientProvider',
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.72.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.72.Final.jar.sha1
deleted file mode 100644
index f1398e52d8c74..0000000000000
--- a/modules/transport-netty4/licenses/netty-buffer-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f306eec3f79541f9b8af9c471a0d5b63b7996272
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.73.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..e5833785ebb7e
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-buffer-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+244a569c9aae973f6f485ac9801d79c1eca36daa
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.72.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.72.Final.jar.sha1
deleted file mode 100644
index f70b5c0909d7c..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-613c4019d687db4e9a5532564e442f83c4474ed7
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.73.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..dcdc1e4e58afe
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+9496a30a349863a4c6fa10d5c36b4f3b495d3a31
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.72.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.72.Final.jar.sha1
deleted file mode 100644
index 8c7611afca886..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-http-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a8f062d67303a5e4b2bc2ad48fb4fd8c99108e45
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.73.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..374cfb98614d5
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+1ceeac4429b9bd517dc05e376a144bbe6b6bd038
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.72.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.72.Final.jar.sha1
deleted file mode 100644
index bfdf4a5cf8585..0000000000000
--- a/modules/transport-netty4/licenses/netty-common-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a55bac9c3af5f59828207b551a96ac19bbfc341e
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.73.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..e80a6e2569d81
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-common-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+27731b58d741b6faa6a00fa3285e7a55cc47be01
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.72.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.72.Final.jar.sha1
deleted file mode 100644
index d6cc1771a2964..0000000000000
--- a/modules/transport-netty4/licenses/netty-handler-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9feee089fee606c64be90c0332db9aef1f7d8e46
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.73.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..0e227997874bf
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-handler-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+1a2231c0074f88254865c3769a4b5842939ea04d
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.72.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.72.Final.jar.sha1
deleted file mode 100644
index d08a6f6e7e42d..0000000000000
--- a/modules/transport-netty4/licenses/netty-resolver-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4ff458458ea32ed1156086820b624a815fcbf2c0
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.73.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..ba24531724fb5
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-resolver-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+bfe83710f0c1739019613e81a06101020ca65def
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.72.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.72.Final.jar.sha1
deleted file mode 100644
index 603f145303012..0000000000000
--- a/modules/transport-netty4/licenses/netty-transport-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-99138b436a584879355aca8fe3c64b46227d5d79
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.73.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..6a8647497f210
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-transport-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+abb155ddff196ccedfe85b810d4b9375ef85fcfa
\ No newline at end of file
diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
index e95a730c2b755..08df9259d475f 100644
--- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
@@ -94,7 +94,7 @@ public void testLimitsInFlightRequests() throws Exception {
List<Tuple<String, CharSequence>> requests = new ArrayList<>();
for (int i = 0; i < 150; i++) {
- requests.add(Tuple.tuple("/index/type/_bulk", bulkRequest));
+ requests.add(Tuple.tuple("/index/_bulk", bulkRequest));
}
HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class);
diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/OpenSearchLoggingHandlerIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/OpenSearchLoggingHandlerIT.java
index c1c689471fc82..ea3f21dd0ed3b 100644
--- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/OpenSearchLoggingHandlerIT.java
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/OpenSearchLoggingHandlerIT.java
@@ -53,6 +53,7 @@ public class OpenSearchLoggingHandlerIT extends OpenSearchNetty4IntegTestCase {
public void setUp() throws Exception {
super.setUp();
appender = MockLogAppender.createForLoggers(
+ "^[^\n]+$", /* Only consider single line log statements */
LogManager.getLogger(OpenSearchLoggingHandler.class),
LogManager.getLogger(TransportLogger.class),
LogManager.getLogger(TcpTransport.class)
@@ -66,12 +67,13 @@ public void tearDown() throws Exception {
@TestLogging(value = "org.opensearch.transport.netty4.OpenSearchLoggingHandler:trace,org.opensearch.transport.TransportLogger:trace", reason = "to ensure we log network events on TRACE level")
public void testLoggingHandler() {
- final String writePattern = ".*\\[length: \\d+"
+ final String writePattern = "^.*\\[length: \\d+"
+ ", request id: \\d+"
+ ", type: request"
- + ", version: .*"
+ + ", version: [^,]+"
+ + ", header size: \\d+B"
+ ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]"
- + " WRITE: \\d+B";
+ + " WRITE: \\d+B$";
final MockLogAppender.LoggingExpectation writeExpectation = new MockLogAppender.PatternSeenEventExpectation(
"hot threads request",
TransportLogger.class.getCanonicalName(),
@@ -86,12 +88,12 @@ public void testLoggingHandler() {
"*FLUSH*"
);
- final String readPattern = ".*\\[length: \\d+"
+ final String readPattern = "^.*\\[length: \\d+"
+ ", request id: \\d+"
+ ", type: request"
- + ", version: .*"
+ + ", version: [^,]+"
+ ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]"
- + " READ: \\d+B";
+ + " READ: \\d+B$";
final MockLogAppender.LoggingExpectation readExpectation = new MockLogAppender.PatternSeenEventExpectation(
"hot threads request",
diff --git a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java
index a8fc705363bef..1593488701e26 100644
--- a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java
+++ b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java
@@ -86,42 +86,6 @@ public void testIndexExists() throws IOException {
headTestCase("/test", singletonMap("pretty", "true"), greaterThan(0));
}
- public void testTypeExists() throws IOException {
- createTestDoc();
- headTestCase(
- "/test/_mapping/_doc",
- emptyMap(),
- OK.getStatus(),
- greaterThan(0),
- "Type exists requests are deprecated, as types have been deprecated."
- );
- headTestCase(
- "/test/_mapping/_doc",
- singletonMap("pretty", "true"),
- OK.getStatus(),
- greaterThan(0),
- "Type exists requests are deprecated, as types have been deprecated."
- );
- }
-
- public void testTypeDoesNotExist() throws IOException {
- createTestDoc();
- headTestCase(
- "/test/_mapping/does-not-exist",
- emptyMap(),
- NOT_FOUND.getStatus(),
- greaterThan(0),
- "Type exists requests are deprecated, as types have been deprecated."
- );
- headTestCase(
- "/text/_mapping/test,does-not-exist",
- emptyMap(),
- NOT_FOUND.getStatus(),
- greaterThan(0),
- "Type exists requests are deprecated, as types have been deprecated."
- );
- }
-
public void testAliasExists() throws IOException {
createTestDoc();
try (XContentBuilder builder = jsonBuilder()) {
diff --git a/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java b/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java
index b967298b30a41..12120e365fe29 100644
--- a/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java
+++ b/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java
@@ -99,7 +99,6 @@ public void testBasicUsage() throws Exception {
// searching for either of the terms should return both results since they collate to the same value
SearchRequest request = new SearchRequest().indices(index)
- .types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@@ -143,7 +142,6 @@ public void testMultipleValues() throws Exception {
// using sort mode = max, values B and C will be used for the sort
SearchRequest request = new SearchRequest().indices(index)
- .types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", "a"))
@@ -159,7 +157,6 @@ public void testMultipleValues() throws Exception {
// same thing, using different sort mode that will use a for both docs
request = new SearchRequest().indices(index)
- .types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", "a"))
@@ -207,7 +204,6 @@ public void testNormalization() throws Exception {
// searching for either of the terms should return both results since they collate to the same value
SearchRequest request = new SearchRequest().indices(index)
- .types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@@ -253,7 +249,6 @@ public void testSecondaryStrength() throws Exception {
);
SearchRequest request = new SearchRequest().indices(index)
- .types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@@ -300,7 +295,6 @@ public void testIgnorePunctuation() throws Exception {
);
SearchRequest request = new SearchRequest().indices(index)
- .types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@@ -348,7 +342,6 @@ public void testIgnoreWhitespace() throws Exception {
);
SearchRequest request = new SearchRequest().indices(index)
- .types(type)
.source(
new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC).sort("id", SortOrder.ASC) // secondary sort
// should kick in on
@@ -391,7 +384,6 @@ public void testNumerics() throws Exception {
);
SearchRequest request = new SearchRequest().indices(index)
- .types(type)
.source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC));
SearchResponse response = client().search(request).actionGet();
@@ -434,7 +426,6 @@ public void testIgnoreAccentsButNotCase() throws Exception {
);
SearchRequest request = new SearchRequest().indices(index)
- .types(type)
.source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC).sort("id", SortOrder.DESC));
SearchResponse response = client().search(request).actionGet();
@@ -472,7 +463,6 @@ public void testUpperCaseFirst() throws Exception {
);
SearchRequest request = new SearchRequest().indices(index)
- .types(type)
.source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC));
SearchResponse response = client().search(request).actionGet();
@@ -522,7 +512,6 @@ public void testCustomRules() throws Exception {
);
SearchRequest request = new SearchRequest().indices(index)
- .types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
diff --git a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IndexableBinaryStringTools.java b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IndexableBinaryStringTools.java
index eb7e006857f07..c473ca53c6404 100644
--- a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IndexableBinaryStringTools.java
+++ b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IndexableBinaryStringTools.java
@@ -48,7 +48,6 @@
* padding is indistinguishable from valid information. To overcome this
* problem, a char is appended, indicating the number of encoded bytes in the
* final content char.
- * <p>
*
* @deprecated Implement {@link TermToBytesRefAttribute} and store bytes directly
* instead. This class WAS removed in Lucene 5.0
diff --git a/plugins/analysis-icu/src/main/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapper.java
index e6a33c7545851..59b01b0ddb466 100644
--- a/plugins/analysis-icu/src/main/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapper.java
+++ b/plugins/analysis-icu/src/main/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapper.java
@@ -120,12 +120,12 @@ public String typeName() {
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
- return new SourceValueFetcher(name(), mapperService, nullValue) {
+ return new SourceValueFetcher(name(), context, nullValue) {
@Override
protected String parseSourceValue(Object value) {
String keywordValue = value.toString();
diff --git a/plugins/analysis-stempel/src/main/java/org/opensearch/index/analysis/pl/PolishAnalyzerProvider.java b/plugins/analysis-stempel/src/main/java/org/opensearch/index/analysis/pl/PolishAnalyzerProvider.java
index 5a0cf81cc379e..2319b825892ac 100644
--- a/plugins/analysis-stempel/src/main/java/org/opensearch/index/analysis/pl/PolishAnalyzerProvider.java
+++ b/plugins/analysis-stempel/src/main/java/org/opensearch/index/analysis/pl/PolishAnalyzerProvider.java
@@ -46,7 +46,6 @@ public PolishAnalyzerProvider(IndexSettings indexSettings, Environment environme
super(indexSettings, name, settings);
analyzer = new PolishAnalyzer(PolishAnalyzer.getDefaultStopSet());
- analyzer.setVersion(version);
}
@Override
diff --git a/plugins/analysis-ukrainian/build.gradle b/plugins/analysis-ukrainian/build.gradle
index e7ba996587e22..60ab39997216c 100644
--- a/plugins/analysis-ukrainian/build.gradle
+++ b/plugins/analysis-ukrainian/build.gradle
@@ -36,9 +36,9 @@ opensearchplugin {
dependencies {
api "org.apache.lucene:lucene-analyzers-morfologik:${versions.lucene}"
- api "org.carrot2:morfologik-stemming:2.1.1"
+ api "org.carrot2:morfologik-stemming:2.1.8"
api "org.carrot2:morfologik-fsa:2.1.1"
- api "ua.net.nlp:morfologik-ukrainian-search:3.7.5"
+ api "ua.net.nlp:morfologik-ukrainian-search:4.9.1"
}
restResources {
diff --git a/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.1.jar.sha1
deleted file mode 100644
index 22af41d2b6b1b..0000000000000
--- a/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5c169bab2e7dd04f5cb03d179a73a4339cc1d0a2
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.8.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.8.jar.sha1
new file mode 100644
index 0000000000000..6dfcc82f05b39
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.8.jar.sha1
@@ -0,0 +1 @@
+409fa92db4cfb0f90a33d303732a4882cee3d1e7
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-3.7.5.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-3.7.5.jar.sha1
deleted file mode 100644
index 446e7a91161a8..0000000000000
--- a/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-3.7.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2b8c8fbd740164d220ca7d18605b8b2092e163e9
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-4.9.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-4.9.1.jar.sha1
new file mode 100644
index 0000000000000..31035a1593bbc
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-4.9.1.jar.sha1
@@ -0,0 +1 @@
+98541e2d3e95d69244829c2855b10686b344c3b3
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/src/main/java/org/opensearch/index/analysis/UkrainianAnalyzerProvider.java b/plugins/analysis-ukrainian/src/main/java/org/opensearch/index/analysis/UkrainianAnalyzerProvider.java
index b6bb76e0c9893..9beced7d465a6 100644
--- a/plugins/analysis-ukrainian/src/main/java/org/opensearch/index/analysis/UkrainianAnalyzerProvider.java
+++ b/plugins/analysis-ukrainian/src/main/java/org/opensearch/index/analysis/UkrainianAnalyzerProvider.java
@@ -48,7 +48,6 @@ public UkrainianAnalyzerProvider(IndexSettings indexSettings, Environment env, S
Analysis.parseStopWords(env, settings, UkrainianMorfologikAnalyzer.getDefaultStopSet()),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET)
);
- analyzer.setVersion(version);
}
@Override
diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle
index 8aac387cea6bf..7bb9250ea40a6 100644
--- a/plugins/discovery-azure-classic/build.gradle
+++ b/plugins/discovery-azure-classic/build.gradle
@@ -54,7 +54,7 @@ dependencies {
api "commons-codec:commons-codec:${versions.commonscodec}"
api "commons-lang:commons-lang:2.6"
api "commons-io:commons-io:2.7"
- api 'javax.mail:mail:1.4.5'
+ api 'javax.mail:mail:1.4.7'
api 'javax.inject:javax.inject:1'
api "com.sun.jersey:jersey-client:${versions.jersey}"
api "com.sun.jersey:jersey-core:${versions.jersey}"
@@ -64,7 +64,7 @@ dependencies {
// HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here,
// and whitelist this hack in JarHell
- api 'javax.xml.bind:jaxb-api:2.2.2'
+ api 'javax.xml.bind:jaxb-api:2.3.1'
}
restResources {
diff --git a/plugins/discovery-azure-classic/licenses/jaxb-api-2.2.2.jar.sha1 b/plugins/discovery-azure-classic/licenses/jaxb-api-2.2.2.jar.sha1
deleted file mode 100644
index a37e187238933..0000000000000
--- a/plugins/discovery-azure-classic/licenses/jaxb-api-2.2.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-aeb3021ca93dde265796d82015beecdcff95bf09
\ No newline at end of file
diff --git a/plugins/discovery-azure-classic/licenses/jaxb-api-2.3.1.jar.sha1 b/plugins/discovery-azure-classic/licenses/jaxb-api-2.3.1.jar.sha1
new file mode 100644
index 0000000000000..f4434214e1eec
--- /dev/null
+++ b/plugins/discovery-azure-classic/licenses/jaxb-api-2.3.1.jar.sha1
@@ -0,0 +1 @@
+8531ad5ac454cc2deb9d4d32c40c4d7451939b5d
\ No newline at end of file
diff --git a/plugins/discovery-azure-classic/licenses/mail-1.4.5.jar.sha1 b/plugins/discovery-azure-classic/licenses/mail-1.4.5.jar.sha1
deleted file mode 100644
index b79503e0c69d9..0000000000000
--- a/plugins/discovery-azure-classic/licenses/mail-1.4.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-85319c87280f30e1afc54c355f91f44741beac49
diff --git a/plugins/discovery-azure-classic/licenses/mail-1.4.7.jar.sha1 b/plugins/discovery-azure-classic/licenses/mail-1.4.7.jar.sha1
new file mode 100644
index 0000000000000..0b9ba0ce9f186
--- /dev/null
+++ b/plugins/discovery-azure-classic/licenses/mail-1.4.7.jar.sha1
@@ -0,0 +1 @@
+9add058589d5d85adeb625859bf2c5eeaaedf12d
\ No newline at end of file
diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle
index a6d4134d15a9b..7998e0861c7b1 100644
--- a/plugins/discovery-ec2/build.gradle
+++ b/plugins/discovery-ec2/build.gradle
@@ -134,17 +134,8 @@ tasks.named("thirdPartyAudit").configure {
ignoreMissingClasses(
// classes are missing
'javax.jms.Message',
- 'com.amazonaws.jmespath.JmesPathEvaluationVisitor',
'com.amazonaws.jmespath.JmesPathExpression',
- 'com.amazonaws.jmespath.JmesPathField',
- 'com.amazonaws.jmespath.JmesPathFlatten',
- 'com.amazonaws.jmespath.JmesPathIdentity',
- 'com.amazonaws.jmespath.JmesPathLengthFunction',
- 'com.amazonaws.jmespath.JmesPathLiteral',
- 'com.amazonaws.jmespath.JmesPathProjection',
- 'com.amazonaws.jmespath.JmesPathSubExpression',
'com.amazonaws.jmespath.ObjectMapperSingleton',
- 'com.amazonaws.jmespath.OpGreaterThan',
'software.amazon.ion.IonReader',
'software.amazon.ion.IonSystem',
'software.amazon.ion.IonType',
diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.12.5.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.12.5.jar.sha1
deleted file mode 100644
index 797bcf2b161d4..0000000000000
--- a/plugins/discovery-ec2/licenses/jackson-annotations-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-52d929d5bb21d0186fe24c09624cc3ee4bafc3b3
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.12.6.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..48ee3bf53c630
--- /dev/null
+++ b/plugins/discovery-ec2/licenses/jackson-annotations-2.12.6.jar.sha1
@@ -0,0 +1 @@
+9487231edd6b0b1f14692c9cba9e0462809215d1
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.12.5.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.12.5.jar.sha1
deleted file mode 100644
index ca1bd46bc3cd3..0000000000000
--- a/plugins/discovery-ec2/licenses/jackson-databind-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b064cf057f23d3d35390328c5030847efeffedde
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.12.6.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..f74842887d31b
--- /dev/null
+++ b/plugins/discovery-ec2/licenses/jackson-databind-2.12.6.jar.sha1
@@ -0,0 +1 @@
+fac216b606c1086e36acea6e572ee61572ad1670
\ No newline at end of file
diff --git a/plugins/mapper-annotated-text/src/main/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java b/plugins/mapper-annotated-text/src/main/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java
index fe99746a5ce79..90d123e86e260 100644
--- a/plugins/mapper-annotated-text/src/main/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java
+++ b/plugins/mapper-annotated-text/src/main/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java
@@ -40,8 +40,8 @@
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedHighlighterAnalyzer;
import org.opensearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText;
+import org.opensearch.index.query.QueryShardContext;
import org.opensearch.search.fetch.FetchSubPhase.HitContext;
-import org.opensearch.search.fetch.subphase.highlight.SearchHighlightContext.Field;
import java.io.IOException;
import java.util.ArrayList;
@@ -55,12 +55,12 @@ public class AnnotatedTextHighlighter extends UnifiedHighlighter {
@Override
protected List<Object> loadFieldValues(
CustomUnifiedHighlighter highlighter,
+ QueryShardContext context,
MappedFieldType fieldType,
- Field field,
HitContext hitContext,
boolean forceSource
) throws IOException {
- List<Object> fieldValues = super.loadFieldValues(highlighter, fieldType, field, hitContext, forceSource);
+ List<Object> fieldValues = super.loadFieldValues(highlighter, context, fieldType, hitContext, forceSource);
List<Object> strings = new ArrayList<>(fieldValues.size());
AnnotatedText[] annotations = new AnnotatedText[fieldValues.size()];
diff --git a/plugins/mapper-annotated-text/src/test/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java b/plugins/mapper-annotated-text/src/test/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java
index af94bcfa79367..3e3119094cb69 100644
--- a/plugins/mapper-annotated-text/src/test/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java
+++ b/plugins/mapper-annotated-text/src/test/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java
@@ -42,6 +42,7 @@
import org.opensearch.index.mapper.FieldTypeTestCase;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.Mapper;
+import org.opensearch.index.query.IntervalMode;
import java.io.IOException;
import java.util.Collections;
@@ -51,7 +52,7 @@ public class AnnotatedTextFieldTypeTests extends FieldTypeTestCase {
public void testIntervals() throws IOException {
MappedFieldType ft = new AnnotatedTextFieldMapper.AnnotatedTextFieldType("field", Collections.emptyMap());
NamedAnalyzer a = new NamedAnalyzer("name", AnalyzerScope.INDEX, new StandardAnalyzer());
- IntervalsSource source = ft.intervals("Donald Trump", 0, true, a, false);
+ IntervalsSource source = ft.intervals("Donald Trump", 0, IntervalMode.ORDERED, a, false);
assertEquals(Intervals.phrase(Intervals.term("donald"), Intervals.term("trump")), source);
}
diff --git a/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml b/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml
index be3b32e6338dc..b4acccf36879d 100644
--- a/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml
+++ b/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml
@@ -3,10 +3,6 @@
---
"annotated highlighter on annotated text":
- - skip:
- version: " - 6.4.99"
- reason: Annotated text type introduced in 6.5.0
-
- do:
indices.create:
index: annotated
@@ -80,10 +76,6 @@
---
"issue 39395 thread safety issue -requires multiple calls to reveal":
- - skip:
- version: " - 6.4.99"
- reason: Annotated text type introduced in 6.5.0
-
- do:
indices.create:
index: annotated
diff --git a/plugins/mapper-murmur3/src/main/java/org/opensearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/opensearch/index/mapper/murmur3/Murmur3FieldMapper.java
index ffbb1905bcd98..4e87b03132055 100644
--- a/plugins/mapper-murmur3/src/main/java/org/opensearch/index/mapper/murmur3/Murmur3FieldMapper.java
+++ b/plugins/mapper-murmur3/src/main/java/org/opensearch/index/mapper/murmur3/Murmur3FieldMapper.java
@@ -44,7 +44,6 @@
import org.opensearch.index.fielddata.plain.SortedNumericIndexFieldData;
import org.opensearch.index.mapper.FieldMapper;
import org.opensearch.index.mapper.MappedFieldType;
-import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.ParametrizedFieldMapper;
import org.opensearch.index.mapper.ParseContext;
import org.opensearch.index.mapper.SourceValueFetcher;
@@ -121,8 +120,8 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
}
@Override
- public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
- return SourceValueFetcher.toString(name(), mapperService, format);
+ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
+ return SourceValueFetcher.toString(name(), context, format);
}
@Override
diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java
index 4811c7d12759c..10edd6d2586d9 100644
--- a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java
+++ b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java
@@ -137,7 +137,7 @@ public void testBasic() throws Exception {
assertAcked(prepareCreate("test").addMapping("type", "_size", "enabled=true"));
final String source = "{\"f\":10}";
indexRandom(true, client().prepareIndex("test", "type", "1").setSource(source, XContentType.JSON));
- GetResponse getResponse = client().prepareGet("test", "type", "1").setStoredFields("_size").get();
+ GetResponse getResponse = client().prepareGet("test", "1").setStoredFields("_size").get();
assertNotNull(getResponse.getField("_size"));
assertEquals(source.length(), (int) getResponse.getField("_size").getValue());
}
diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle
index 81ef4e98923a3..88ce2f667cee2 100644
--- a/plugins/repository-azure/build.gradle
+++ b/plugins/repository-azure/build.gradle
@@ -46,7 +46,7 @@ opensearchplugin {
dependencies {
api 'com.azure:azure-core:1.22.0'
api 'com.azure:azure-storage-common:12.14.0'
- api 'com.azure:azure-core-http-netty:1.11.2'
+ api 'com.azure:azure-core-http-netty:1.11.7'
api "io.netty:netty-codec-dns:${versions.netty}"
api "io.netty:netty-codec-socks:${versions.netty}"
api "io.netty:netty-codec-http2:${versions.netty}"
@@ -56,7 +56,7 @@ dependencies {
implementation project(':modules:transport-netty4')
api 'com.azure:azure-storage-blob:12.14.1'
api 'org.reactivestreams:reactive-streams:1.0.3'
- api 'io.projectreactor:reactor-core:3.4.11'
+ api 'io.projectreactor:reactor-core:3.4.15'
api 'io.projectreactor.netty:reactor-netty:1.0.13'
api 'io.projectreactor.netty:reactor-netty-core:1.0.13'
api 'io.projectreactor.netty:reactor-netty-http:1.0.13'
@@ -119,25 +119,16 @@ thirdPartyAudit {
'io.micrometer.core.instrument.search.Search',
'io.netty.channel.epoll.Epoll',
'io.netty.channel.epoll.EpollDatagramChannel',
- 'io.netty.channel.epoll.EpollDomainDatagramChannel',
- 'io.netty.channel.epoll.EpollDomainSocketChannel',
- 'io.netty.channel.epoll.EpollEventLoopGroup',
- 'io.netty.channel.epoll.EpollServerDomainSocketChannel',
'io.netty.channel.epoll.EpollServerSocketChannel',
'io.netty.channel.epoll.EpollSocketChannel',
'io.netty.channel.kqueue.KQueue',
'io.netty.channel.kqueue.KQueueDatagramChannel',
- 'io.netty.channel.kqueue.KQueueDomainDatagramChannel',
- 'io.netty.channel.kqueue.KQueueDomainSocketChannel',
- 'io.netty.channel.kqueue.KQueueEventLoopGroup',
- 'io.netty.channel.kqueue.KQueueServerDomainSocketChannel',
'io.netty.channel.kqueue.KQueueServerSocketChannel',
'io.netty.channel.kqueue.KQueueSocketChannel',
'io.netty.handler.codec.haproxy.HAProxyMessage',
'io.netty.handler.codec.haproxy.HAProxyMessageDecoder',
'io.netty.incubator.channel.uring.IOUring',
'io.netty.incubator.channel.uring.IOUringDatagramChannel',
- 'io.netty.incubator.channel.uring.IOUringEventLoopGroup',
'io.netty.incubator.channel.uring.IOUringServerSocketChannel',
'io.netty.incubator.channel.uring.IOUringSocketChannel',
'javax.activation.DataHandler',
@@ -167,7 +158,6 @@ thirdPartyAudit {
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters',
- 'kotlin.TypeCastException',
'kotlin.collections.ArraysKt',
'kotlin.jvm.JvmClassMappingKt',
'kotlin.jvm.functions.Function0',
diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.2.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.2.jar.sha1
deleted file mode 100644
index 3d3c0a59a77ba..0000000000000
--- a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7d84ec31d73a7b51bc72044789768b25fb2b14f4
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.7.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.7.jar.sha1
new file mode 100644
index 0000000000000..25db85393f2af
--- /dev/null
+++ b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.7.jar.sha1
@@ -0,0 +1 @@
+c6b14fcca3e75acc8dbe07ac101afd05d48a1647
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-annotations-2.12.5.jar.sha1 b/plugins/repository-azure/licenses/jackson-annotations-2.12.5.jar.sha1
deleted file mode 100644
index 797bcf2b161d4..0000000000000
--- a/plugins/repository-azure/licenses/jackson-annotations-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-52d929d5bb21d0186fe24c09624cc3ee4bafc3b3
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-annotations-2.12.6.jar.sha1 b/plugins/repository-azure/licenses/jackson-annotations-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..48ee3bf53c630
--- /dev/null
+++ b/plugins/repository-azure/licenses/jackson-annotations-2.12.6.jar.sha1
@@ -0,0 +1 @@
+9487231edd6b0b1f14692c9cba9e0462809215d1
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-databind-2.12.5.jar.sha1 b/plugins/repository-azure/licenses/jackson-databind-2.12.5.jar.sha1
deleted file mode 100644
index ca1bd46bc3cd3..0000000000000
--- a/plugins/repository-azure/licenses/jackson-databind-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b064cf057f23d3d35390328c5030847efeffedde
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-databind-2.12.6.jar.sha1 b/plugins/repository-azure/licenses/jackson-databind-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..f74842887d31b
--- /dev/null
+++ b/plugins/repository-azure/licenses/jackson-databind-2.12.6.jar.sha1
@@ -0,0 +1 @@
+fac216b606c1086e36acea6e572ee61572ad1670
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.12.5.jar.sha1 b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.12.5.jar.sha1
deleted file mode 100644
index 6711b58ae535f..0000000000000
--- a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4b872e5a9f7e6644c2dd8d7358ed9fad714d7c90
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.12.6.jar.sha1 b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..43ee9816d99e3
--- /dev/null
+++ b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.12.6.jar.sha1
@@ -0,0 +1 @@
+fecb8514c3a89102bd619b6c624f906a6098b588
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.12.5.jar.sha1 b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.12.5.jar.sha1
deleted file mode 100644
index 930cb084f54c2..0000000000000
--- a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a0a9870b681a72789c5c6bdc380e45ab719c6aa3
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.12.6.jar.sha1 b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..d005cc1bd5f11
--- /dev/null
+++ b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.12.6.jar.sha1
@@ -0,0 +1 @@
+0f7d0d854f24c4254885c275a09fb885ef578b48
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.12.5.jar.sha1 b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.12.5.jar.sha1
deleted file mode 100644
index f10aa2634ca97..0000000000000
--- a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-02b389d7206327e54ae31f709ab75a4a3f33e148
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.12.6.jar.sha1 b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..686d813e002c8
--- /dev/null
+++ b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.12.6.jar.sha1
@@ -0,0 +1 @@
+a0bea2c6f98eb0dc24208b54a53da80ea459c156
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.72.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.72.Final.jar.sha1
deleted file mode 100644
index c8db6b5611676..0000000000000
--- a/plugins/repository-azure/licenses/netty-codec-dns-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-43030c869832981a626458073e86070858747e06
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.73.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..320ae18c98bda
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-codec-dns-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+46137a5b01a5202059324cf4300443e53f11a38d
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.72.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.72.Final.jar.sha1
deleted file mode 100644
index 710f2136045ee..0000000000000
--- a/plugins/repository-azure/licenses/netty-codec-http2-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4b269e666fbace27d2c1efa57703e99b09655822
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.73.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..d7f5a464bcc00
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-codec-http2-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+0eb145bc31fd32a20fd2a3e8b30736d2e0248b0c
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.72.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.72.Final.jar.sha1
deleted file mode 100644
index 4f9cb69436cdc..0000000000000
--- a/plugins/repository-azure/licenses/netty-codec-socks-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d3bc427b6e2dc4bb6dc9d18d1cc47f8530970a8b
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.73.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..6ba41c576c93d
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-codec-socks-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+cefa44d8f5dcaab21179d945f12b6c6d7325cce9
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.72.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.72.Final.jar.sha1
deleted file mode 100644
index 791abf7a66002..0000000000000
--- a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-90de0fe610454d4296052ae36acb8a6a1d0333f1
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.73.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..f50c9abf023cf
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+d1afa6876c3d3bdbdbe5127ddd495e6514d6e600
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.72.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.72.Final.jar.sha1
deleted file mode 100644
index a50ef9c68c247..0000000000000
--- a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a2fc945c3688e2b7d6ddb2c25f33832349657fa0
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.73.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..817fa4cc0d86f
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+97cdf5fb97f8d961cfa3ffb05175009b90e5cfee
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.72.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.72.Final.jar.sha1
deleted file mode 100644
index 4530a75b61263..0000000000000
--- a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-cedc023ffdcb68543b22a1ebc7960a160589aa09
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.73.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..22b8f58bd5103
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+4701063d36f390e02da6da85c13e32a0e78349d2
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/reactor-core-3.4.11.jar.sha1 b/plugins/repository-azure/licenses/reactor-core-3.4.11.jar.sha1
deleted file mode 100644
index fc0911be8fedf..0000000000000
--- a/plugins/repository-azure/licenses/reactor-core-3.4.11.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0e305f6aa6e6da26aa42726f8cfd69b6ab53d7c0
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/reactor-core-3.4.15.jar.sha1 b/plugins/repository-azure/licenses/reactor-core-3.4.15.jar.sha1
new file mode 100644
index 0000000000000..a89de48b20b51
--- /dev/null
+++ b/plugins/repository-azure/licenses/reactor-core-3.4.15.jar.sha1
@@ -0,0 +1 @@
+28ccf513fe64709c8ded30ea3f387fc718db9626
\ No newline at end of file
diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java
index 6345103c6ecc6..753c902a6eb01 100644
--- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java
+++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java
@@ -413,8 +413,8 @@ public Map<String, BlobContainer> children(BlobPath path) throws URISyntaxExcept
public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws URISyntaxException,
BlobStorageException, IOException {
- assert inputStream
- .markSupported() : "Should not be used with non-mark supporting streams as their retry handling in the SDK is broken";
+ assert inputStream.markSupported()
+ : "Should not be used with non-mark supporting streams as their retry handling in the SDK is broken";
logger.trace(() -> new ParameterizedMessage("writeBlob({}, stream, {})", blobName, blobSize));
final Tuple<BlobServiceClient, Supplier<Context>> client = client();
final BlobContainerClient blobContainer = client.v1().getBlobContainerClient(container);
diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepositoryPlugin.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepositoryPlugin.java
index aa41941436171..82ab5243a09aa 100644
--- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepositoryPlugin.java
+++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepositoryPlugin.java
@@ -94,13 +94,15 @@ public List<Setting<?>> getSettings() {
AzureStorageSettings.ENDPOINT_SUFFIX_SETTING,
AzureStorageSettings.TIMEOUT_SETTING,
AzureStorageSettings.MAX_RETRIES_SETTING,
- AzureStorageSettings.PROXY_TYPE_SETTING,
- AzureStorageSettings.PROXY_HOST_SETTING,
- AzureStorageSettings.PROXY_PORT_SETTING,
AzureStorageSettings.CONNECT_TIMEOUT_SETTING,
AzureStorageSettings.WRITE_TIMEOUT_SETTING,
AzureStorageSettings.READ_TIMEOUT_SETTING,
- AzureStorageSettings.RESPONSE_TIMEOUT_SETTING
+ AzureStorageSettings.RESPONSE_TIMEOUT_SETTING,
+ AzureStorageSettings.PROXY_TYPE_SETTING,
+ AzureStorageSettings.PROXY_HOST_SETTING,
+ AzureStorageSettings.PROXY_PORT_SETTING,
+ AzureStorageSettings.PROXY_USERNAME_SETTING,
+ AzureStorageSettings.PROXY_PASSWORD_SETTING
);
}
diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java
index 6cd3a149c6957..3800be7c2d27d 100644
--- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java
+++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java
@@ -43,7 +43,6 @@
import com.azure.core.http.HttpRequest;
import com.azure.core.http.HttpResponse;
import com.azure.core.http.ProxyOptions;
-import com.azure.core.http.ProxyOptions.Type;
import com.azure.core.http.netty.NettyAsyncHttpClientBuilder;
import com.azure.core.http.policy.HttpPipelinePolicy;
import com.azure.core.util.Configuration;
@@ -66,12 +65,11 @@
import org.opensearch.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
-import java.net.InetSocketAddress;
-import java.net.Proxy;
+import java.net.Authenticator;
+import java.net.PasswordAuthentication;
import java.net.URISyntaxException;
import java.security.InvalidKeyException;
import java.time.Duration;
-import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -169,15 +167,20 @@ private ClientState buildClient(AzureStorageSettings azureStorageSettings, BiCon
final NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(new NioThreadFactory());
final NettyAsyncHttpClientBuilder clientBuilder = new NettyAsyncHttpClientBuilder().eventLoopGroup(eventLoopGroup);
- final Proxy proxy = azureStorageSettings.getProxy();
- if (proxy != null) {
- final Type type = Arrays.stream(Type.values())
- .filter(t -> t.toProxyType().equals(proxy.type()))
- .findFirst()
- .orElseThrow(() -> new IllegalArgumentException("Unsupported proxy type: " + proxy.type()));
-
- clientBuilder.proxy(new ProxyOptions(type, (InetSocketAddress) proxy.address()));
- }
+ SocketAccess.doPrivilegedVoidException(() -> {
+ final ProxySettings proxySettings = azureStorageSettings.getProxySettings();
+ if (proxySettings != ProxySettings.NO_PROXY_SETTINGS) {
+ if (proxySettings.isAuthenticated()) {
+ Authenticator.setDefault(new Authenticator() {
+ @Override
+ protected PasswordAuthentication getPasswordAuthentication() {
+ return new PasswordAuthentication(proxySettings.getUsername(), proxySettings.getPassword().toCharArray());
+ }
+ });
+ }
+ clientBuilder.proxy(new ProxyOptions(proxySettings.getType().toProxyType(), proxySettings.getAddress()));
+ }
+ });
final TimeValue connectTimeout = azureStorageSettings.getConnectTimeout();
if (connectTimeout != null) {
diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java
index 94ec553ab760e..c9a031451bccd 100644
--- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java
+++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java
@@ -44,8 +44,6 @@
import org.opensearch.common.settings.SettingsException;
import org.opensearch.common.unit.TimeValue;
import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.Proxy;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.HashMap;
@@ -143,10 +141,10 @@ final class AzureStorageSettings {
);
/** The type of the proxy to connect to azure through. Can be direct (no proxy, default), http or socks */
- public static final AffixSetting<Proxy.Type> PROXY_TYPE_SETTING = Setting.affixKeySetting(
+ public static final AffixSetting<ProxySettings.ProxyType> PROXY_TYPE_SETTING = Setting.affixKeySetting(
AZURE_CLIENT_PREFIX_KEY,
"proxy.type",
- (key) -> new Setting<>(key, "direct", s -> Proxy.Type.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope),
+ (key) -> new Setting<>(key, "direct", s -> ProxySettings.ProxyType.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope),
() -> ACCOUNT_SETTING,
() -> KEY_SETTING
);
@@ -162,27 +160,50 @@ final class AzureStorageSettings {
);
/** The port of a proxy to connect to azure through. */
- public static final Setting<Integer> PROXY_PORT_SETTING = Setting.affixKeySetting(
+ public static final AffixSetting<Integer> PROXY_PORT_SETTING = Setting.affixKeySetting(
AZURE_CLIENT_PREFIX_KEY,
"proxy.port",
(key) -> Setting.intSetting(key, 0, 0, 65535, Setting.Property.NodeScope),
+ () -> KEY_SETTING,
() -> ACCOUNT_SETTING,
+ () -> PROXY_TYPE_SETTING,
+ () -> PROXY_HOST_SETTING
+ );
+
+ /** The username of a proxy to connect */
+ static final AffixSetting<SecureString> PROXY_USERNAME_SETTING = Setting.affixKeySetting(
+ AZURE_CLIENT_PREFIX_KEY,
+ "proxy.username",
+ key -> SecureSetting.secureString(key, null),
() -> KEY_SETTING,
+ () -> ACCOUNT_SETTING,
() -> PROXY_TYPE_SETTING,
() -> PROXY_HOST_SETTING
);
+ /** The password of a proxy to connect */
+ static final AffixSetting<SecureString> PROXY_PASSWORD_SETTING = Setting.affixKeySetting(
+ AZURE_CLIENT_PREFIX_KEY,
+ "proxy.password",
+ key -> SecureSetting.secureString(key, null),
+ () -> KEY_SETTING,
+ () -> ACCOUNT_SETTING,
+ () -> PROXY_TYPE_SETTING,
+ () -> PROXY_HOST_SETTING,
+ () -> PROXY_USERNAME_SETTING
+ );
+
private final String account;
private final String connectString;
private final String endpointSuffix;
private final TimeValue timeout;
private final int maxRetries;
- private final Proxy proxy;
private final LocationMode locationMode;
private final TimeValue connectTimeout;
private final TimeValue writeTimeout;
private final TimeValue readTimeout;
private final TimeValue responseTimeout;
+ private final ProxySettings proxySettings;
// copy-constructor
private AzureStorageSettings(
@@ -191,24 +212,24 @@ private AzureStorageSettings(
String endpointSuffix,
TimeValue timeout,
int maxRetries,
- Proxy proxy,
LocationMode locationMode,
TimeValue connectTimeout,
TimeValue writeTimeout,
TimeValue readTimeout,
- TimeValue responseTimeout
+ TimeValue responseTimeout,
+ ProxySettings proxySettings
) {
this.account = account;
this.connectString = connectString;
this.endpointSuffix = endpointSuffix;
this.timeout = timeout;
this.maxRetries = maxRetries;
- this.proxy = proxy;
this.locationMode = locationMode;
this.connectTimeout = connectTimeout;
this.writeTimeout = writeTimeout;
this.readTimeout = readTimeout;
this.responseTimeout = responseTimeout;
+ this.proxySettings = proxySettings;
}
private AzureStorageSettings(
@@ -218,42 +239,23 @@ private AzureStorageSettings(
String endpointSuffix,
TimeValue timeout,
int maxRetries,
- Proxy.Type proxyType,
- String proxyHost,
- Integer proxyPort,
TimeValue connectTimeout,
TimeValue writeTimeout,
TimeValue readTimeout,
- TimeValue responseTimeout
+ TimeValue responseTimeout,
+ ProxySettings proxySettings
) {
this.account = account;
this.connectString = buildConnectString(account, key, sasToken, endpointSuffix);
this.endpointSuffix = endpointSuffix;
this.timeout = timeout;
this.maxRetries = maxRetries;
- // Register the proxy if we have any
- // Validate proxy settings
- if (proxyType.equals(Proxy.Type.DIRECT) && ((proxyPort != 0) || Strings.hasText(proxyHost))) {
- throw new SettingsException("Azure Proxy port or host have been set but proxy type is not defined.");
- }
- if ((proxyType.equals(Proxy.Type.DIRECT) == false) && ((proxyPort == 0) || Strings.isEmpty(proxyHost))) {
- throw new SettingsException("Azure Proxy type has been set but proxy host or port is not defined.");
- }
-
- if (proxyType.equals(Proxy.Type.DIRECT)) {
- proxy = null;
- } else {
- try {
- proxy = new Proxy(proxyType, new InetSocketAddress(InetAddress.getByName(proxyHost), proxyPort));
- } catch (final UnknownHostException e) {
- throw new SettingsException("Azure proxy host is unknown.", e);
- }
- }
this.locationMode = LocationMode.PRIMARY_ONLY;
this.connectTimeout = connectTimeout;
this.writeTimeout = writeTimeout;
this.readTimeout = readTimeout;
this.responseTimeout = responseTimeout;
+ this.proxySettings = proxySettings;
}
public String getEndpointSuffix() {
@@ -268,8 +270,8 @@ public int getMaxRetries() {
return maxRetries;
}
- public Proxy getProxy() {
- return proxy;
+ public ProxySettings getProxySettings() {
+ return proxySettings;
}
public String getConnectString() {
@@ -325,7 +327,7 @@ public String toString() {
sb.append(", timeout=").append(timeout);
sb.append(", endpointSuffix='").append(endpointSuffix).append('\'');
sb.append(", maxRetries=").append(maxRetries);
- sb.append(", proxy=").append(proxy);
+ sb.append(", proxySettings=").append(proxySettings != ProxySettings.NO_PROXY_SETTINGS ? "PROXY_SET" : "PROXY_NOT_SET");
sb.append(", locationMode='").append(locationMode).append('\'');
sb.append(", connectTimeout='").append(connectTimeout).append('\'');
sb.append(", writeTimeout='").append(writeTimeout).append('\'');
@@ -371,17 +373,42 @@ private static AzureStorageSettings getClientSettings(Settings settings, String
getValue(settings, clientName, ENDPOINT_SUFFIX_SETTING),
getValue(settings, clientName, TIMEOUT_SETTING),
getValue(settings, clientName, MAX_RETRIES_SETTING),
- getValue(settings, clientName, PROXY_TYPE_SETTING),
- getValue(settings, clientName, PROXY_HOST_SETTING),
- getValue(settings, clientName, PROXY_PORT_SETTING),
getValue(settings, clientName, CONNECT_TIMEOUT_SETTING),
getValue(settings, clientName, WRITE_TIMEOUT_SETTING),
getValue(settings, clientName, READ_TIMEOUT_SETTING),
- getValue(settings, clientName, RESPONSE_TIMEOUT_SETTING)
+ getValue(settings, clientName, RESPONSE_TIMEOUT_SETTING),
+ validateAndCreateProxySettings(settings, clientName)
);
}
}
+ static ProxySettings validateAndCreateProxySettings(final Settings settings, final String clientName) {
+ final ProxySettings.ProxyType proxyType = getConfigValue(settings, clientName, PROXY_TYPE_SETTING);
+ final String proxyHost = getConfigValue(settings, clientName, PROXY_HOST_SETTING);
+ final int proxyPort = getConfigValue(settings, clientName, PROXY_PORT_SETTING);
+ final SecureString proxyUserName = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING);
+ final SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING);
+ // Validate proxy settings
+ if (proxyType == ProxySettings.ProxyType.DIRECT
+ && (proxyPort != 0 || Strings.hasText(proxyHost) || Strings.hasText(proxyUserName) || Strings.hasText(proxyPassword))) {
+ throw new SettingsException("Azure proxy port or host or username or password have been set but proxy type is not defined.");
+ }
+ if (proxyType != ProxySettings.ProxyType.DIRECT && (proxyPort == 0 || Strings.isEmpty(proxyHost))) {
+ throw new SettingsException("Azure proxy type has been set but proxy host or port is not defined.");
+ }
+
+ if (proxyType == ProxySettings.ProxyType.DIRECT) {
+ return ProxySettings.NO_PROXY_SETTINGS;
+ }
+
+ try {
+ final InetAddress proxyHostAddress = InetAddress.getByName(proxyHost);
+ return new ProxySettings(proxyType, proxyHostAddress, proxyPort, proxyUserName.toString(), proxyPassword.toString());
+ } catch (final UnknownHostException e) {
+ throw new SettingsException("Azure proxy host is unknown.", e);
+ }
+ }
+
private static <T> T getConfigValue(Settings settings, String clientName, Setting.AffixSetting<T> clientSetting) {
final Setting<T> concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName);
return concreteSetting.get(settings);
@@ -407,12 +434,12 @@ static Map<String, AzureStorageSettings> overrideLocationMode(
entry.getValue().endpointSuffix,
entry.getValue().timeout,
entry.getValue().maxRetries,
- entry.getValue().proxy,
locationMode,
entry.getValue().connectTimeout,
entry.getValue().writeTimeout,
entry.getValue().readTimeout,
- entry.getValue().responseTimeout
+ entry.getValue().responseTimeout,
+ entry.getValue().getProxySettings()
)
);
}
diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/ProxySettings.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/ProxySettings.java
new file mode 100644
index 0000000000000..df8c95e69acf2
--- /dev/null
+++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/ProxySettings.java
@@ -0,0 +1,110 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.repositories.azure;
+
+import com.azure.core.http.ProxyOptions;
+import org.opensearch.common.Strings;
+import org.opensearch.common.settings.SettingsException;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.util.Objects;
+
+public class ProxySettings {
+
+ public static final ProxySettings NO_PROXY_SETTINGS = new ProxySettings(ProxyType.DIRECT, null, -1, null, null);
+
+ private final ProxyType type;
+
+ private final InetAddress host;
+
+ private final String username;
+
+ private final String password;
+
+ private final int port;
+
+ public static enum ProxyType {
+ HTTP(ProxyOptions.Type.HTTP.name()),
+
+ /**
+ * Please use SOCKS4 instead
+ */
+ @Deprecated
+ SOCKS(ProxyOptions.Type.SOCKS4.name()),
+
+ SOCKS4(ProxyOptions.Type.SOCKS4.name()),
+
+ SOCKS5(ProxyOptions.Type.SOCKS5.name()),
+
+ DIRECT("DIRECT");
+
+ private final String name;
+
+ private ProxyType(String name) {
+ this.name = name;
+ }
+
+ public ProxyOptions.Type toProxyType() {
+ if (this == DIRECT) {
+ // We check it in settings,
+ // the probability that it could be thrown is small, but who knows
+ throw new SettingsException("Couldn't convert to Azure proxy type");
+ }
+ return ProxyOptions.Type.valueOf(name());
+ }
+
+ }
+
+ public ProxySettings(final ProxyType type, final InetAddress host, final int port, final String username, final String password) {
+ this.type = type;
+ this.host = host;
+ this.port = port;
+ this.username = username;
+ this.password = password;
+ }
+
+ public ProxyType getType() {
+ return this.type;
+ }
+
+ public InetSocketAddress getAddress() {
+ return new InetSocketAddress(host, port);
+ }
+
+ public String getUsername() {
+ return this.username;
+ }
+
+ public String getPassword() {
+ return this.password;
+ }
+
+ public boolean isAuthenticated() {
+ return Strings.isNullOrEmpty(username) == false && Strings.isNullOrEmpty(password) == false;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ final ProxySettings that = (ProxySettings) o;
+ return port == that.port
+ && type == that.type
+ && Objects.equals(host, that.host)
+ && Objects.equals(username, that.username)
+ && Objects.equals(password, that.password);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(type, host, username, password, port);
+ }
+
+}
diff --git a/plugins/repository-azure/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-azure/src/main/plugin-metadata/plugin-security.policy
index f6d0f5fcb08d5..f3bf52ea46505 100644
--- a/plugins/repository-azure/src/main/plugin-metadata/plugin-security.policy
+++ b/plugins/repository-azure/src/main/plugin-metadata/plugin-security.policy
@@ -38,4 +38,7 @@ grant {
permission java.lang.RuntimePermission "accessDeclaredMembers";
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
permission java.lang.RuntimePermission "setContextClassLoader";
+
+ // the Azure client sets a default Authenticator for proxy username/password
+ permission java.net.NetPermission "setDefaultAuthenticator";
};
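A minimal sketch of how the authenticated proxy support added above could be exercised, assuming the same azure.client.<name>.proxy.* keys and the MockSecureSettings helper used by AzureStorageServiceTests below; the client name, host, port and credentials are illustrative only.

import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.Settings;

public class AzureProxyConfigSketch {
    // Builds node settings for an Azure client named "azure1" that connects through an
    // authenticated SOCKS5 proxy. Username and password are secure settings, so they go
    // through the (mock) keystore rather than plain node settings.
    public static Settings authenticatedSocks5Proxy() {
        final MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString("azure.client.azure1.proxy.username", "user");
        secureSettings.setString("azure.client.azure1.proxy.password", "pwd");
        return Settings.builder()
            .put("azure.client.azure1.proxy.type", "socks5") // http, socks, socks4 or socks5
            .put("azure.client.azure1.proxy.host", "127.0.0.1")
            .put("azure.client.azure1.proxy.port", 8080)
            .setSecureSettings(secureSettings)
            .build();
    }
}

With settings like these, validateAndCreateProxySettings produces an authenticated ProxySettings instance and AzureStorageService installs a default java.net.Authenticator, which is why the plugin-security policy above now grants setDefaultAuthenticator.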
diff --git a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureStorageServiceTests.java b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureStorageServiceTests.java
index 785ebef7307bc..7f5ca73a507ad 100644
--- a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureStorageServiceTests.java
+++ b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureStorageServiceTests.java
@@ -32,6 +32,7 @@
package org.opensearch.repositories.azure;
+import org.opensearch.common.Strings;
import reactor.core.scheduler.Schedulers;
import com.azure.core.http.policy.HttpPipelinePolicy;
@@ -50,7 +51,6 @@
import java.io.UncheckedIOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
-import java.net.Proxy;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
@@ -299,9 +299,9 @@ public void testGetSelectedClientBackoffPolicyNbRetries() {
public void testNoProxy() {
final Settings settings = Settings.builder().setSecureSettings(buildSecureSettings()).build();
final AzureStorageService mock = storageServiceWithSettingsValidation(settings);
- assertThat(mock.storageSettings.get("azure1").getProxy(), nullValue());
- assertThat(mock.storageSettings.get("azure2").getProxy(), nullValue());
- assertThat(mock.storageSettings.get("azure3").getProxy(), nullValue());
+ assertEquals(mock.storageSettings.get("azure1").getProxySettings(), ProxySettings.NO_PROXY_SETTINGS);
+ assertEquals(mock.storageSettings.get("azure2").getProxySettings(), ProxySettings.NO_PROXY_SETTINGS);
+ assertEquals(mock.storageSettings.get("azure3").getProxySettings(), ProxySettings.NO_PROXY_SETTINGS);
}
public void testProxyHttp() throws UnknownHostException {
@@ -312,13 +312,13 @@ public void testProxyHttp() throws UnknownHostException {
.put("azure.client.azure1.proxy.type", "http")
.build();
final AzureStorageService mock = storageServiceWithSettingsValidation(settings);
- final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy();
+ final ProxySettings azure1Proxy = mock.storageSettings.get("azure1").getProxySettings();
assertThat(azure1Proxy, notNullValue());
- assertThat(azure1Proxy.type(), is(Proxy.Type.HTTP));
- assertThat(azure1Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080)));
- assertThat(mock.storageSettings.get("azure2").getProxy(), nullValue());
- assertThat(mock.storageSettings.get("azure3").getProxy(), nullValue());
+ assertThat(azure1Proxy.getType(), is(ProxySettings.ProxyType.HTTP));
+ assertThat(azure1Proxy.getAddress(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080)));
+ assertEquals(ProxySettings.NO_PROXY_SETTINGS, mock.storageSettings.get("azure2").getProxySettings());
+ assertEquals(ProxySettings.NO_PROXY_SETTINGS, mock.storageSettings.get("azure3").getProxySettings());
}
public void testMultipleProxies() throws UnknownHostException {
@@ -332,52 +332,59 @@ public void testMultipleProxies() throws UnknownHostException {
.put("azure.client.azure2.proxy.type", "http")
.build();
final AzureStorageService mock = storageServiceWithSettingsValidation(settings);
- final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy();
+ final ProxySettings azure1Proxy = mock.storageSettings.get("azure1").getProxySettings();
assertThat(azure1Proxy, notNullValue());
- assertThat(azure1Proxy.type(), is(Proxy.Type.HTTP));
- assertThat(azure1Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080)));
- final Proxy azure2Proxy = mock.storageSettings.get("azure2").getProxy();
+ assertThat(azure1Proxy.getType(), is(ProxySettings.ProxyType.HTTP));
+ assertThat(azure1Proxy.getAddress(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080)));
+ final ProxySettings azure2Proxy = mock.storageSettings.get("azure2").getProxySettings();
assertThat(azure2Proxy, notNullValue());
- assertThat(azure2Proxy.type(), is(Proxy.Type.HTTP));
- assertThat(azure2Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8081)));
- assertThat(mock.storageSettings.get("azure3").getProxy(), nullValue());
+ assertThat(azure2Proxy.getType(), is(ProxySettings.ProxyType.HTTP));
+ assertThat(azure2Proxy.getAddress(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8081)));
+ assertTrue(Strings.isNullOrEmpty(azure2Proxy.getUsername()));
+ assertTrue(Strings.isNullOrEmpty(azure2Proxy.getPassword()));
+ assertEquals(mock.storageSettings.get("azure3").getProxySettings(), ProxySettings.NO_PROXY_SETTINGS);
}
public void testProxySocks() throws UnknownHostException {
+ final MockSecureSettings secureSettings = buildSecureSettings();
+ secureSettings.setString("azure.client.azure1.proxy.username", "user");
+ secureSettings.setString("azure.client.azure1.proxy.password", "pwd");
final Settings settings = Settings.builder()
- .setSecureSettings(buildSecureSettings())
.put("azure.client.azure1.proxy.host", "127.0.0.1")
.put("azure.client.azure1.proxy.port", 8080)
- .put("azure.client.azure1.proxy.type", "socks")
+ .put("azure.client.azure1.proxy.type", "socks5")
+ .setSecureSettings(secureSettings)
.build();
final AzureStorageService mock = storageServiceWithSettingsValidation(settings);
- final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy();
+ final ProxySettings azure1Proxy = mock.storageSettings.get("azure1").getProxySettings();
assertThat(azure1Proxy, notNullValue());
- assertThat(azure1Proxy.type(), is(Proxy.Type.SOCKS));
- assertThat(azure1Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080)));
- assertThat(mock.storageSettings.get("azure2").getProxy(), nullValue());
- assertThat(mock.storageSettings.get("azure3").getProxy(), nullValue());
+ assertThat(azure1Proxy.getType(), is(ProxySettings.ProxyType.SOCKS5));
+ assertThat(azure1Proxy.getAddress(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080)));
+ assertEquals("user", azure1Proxy.getUsername());
+ assertEquals("pwd", azure1Proxy.getPassword());
+ assertEquals(ProxySettings.NO_PROXY_SETTINGS, mock.storageSettings.get("azure2").getProxySettings());
+ assertEquals(ProxySettings.NO_PROXY_SETTINGS, mock.storageSettings.get("azure3").getProxySettings());
}
public void testProxyNoHost() {
final Settings settings = Settings.builder()
.setSecureSettings(buildSecureSettings())
.put("azure.client.azure1.proxy.port", 8080)
- .put("azure.client.azure1.proxy.type", randomFrom("socks", "http"))
+ .put("azure.client.azure1.proxy.type", randomFrom("socks", "socks4", "socks5", "http"))
.build();
final SettingsException e = expectThrows(SettingsException.class, () -> storageServiceWithSettingsValidation(settings));
- assertEquals("Azure Proxy type has been set but proxy host or port is not defined.", e.getMessage());
+ assertEquals("Azure proxy type has been set but proxy host or port is not defined.", e.getMessage());
}
public void testProxyNoPort() {
final Settings settings = Settings.builder()
.setSecureSettings(buildSecureSettings())
.put("azure.client.azure1.proxy.host", "127.0.0.1")
- .put("azure.client.azure1.proxy.type", randomFrom("socks", "http"))
+ .put("azure.client.azure1.proxy.type", randomFrom("socks", "socks4", "socks5", "http"))
.build();
final SettingsException e = expectThrows(SettingsException.class, () -> storageServiceWithSettingsValidation(settings));
- assertEquals("Azure Proxy type has been set but proxy host or port is not defined.", e.getMessage());
+ assertEquals("Azure proxy type has been set but proxy host or port is not defined.", e.getMessage());
}
public void testProxyNoType() {
@@ -388,13 +395,13 @@ public void testProxyNoType() {
.build();
final SettingsException e = expectThrows(SettingsException.class, () -> storageServiceWithSettingsValidation(settings));
- assertEquals("Azure Proxy port or host have been set but proxy type is not defined.", e.getMessage());
+ assertEquals("Azure proxy port or host or username or password have been set but proxy type is not defined.", e.getMessage());
}
public void testProxyWrongHost() {
final Settings settings = Settings.builder()
.setSecureSettings(buildSecureSettings())
- .put("azure.client.azure1.proxy.type", randomFrom("socks", "http"))
+ .put("azure.client.azure1.proxy.type", randomFrom("socks", "socks4", "socks5", "http"))
.put("azure.client.azure1.proxy.host", "thisisnotavalidhostorwehavebeensuperunlucky")
.put("azure.client.azure1.proxy.port", 8080)
.build();
diff --git a/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml b/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml
index 650d5c4474199..beaa95b732d52 100644
--- a/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml
+++ b/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml
@@ -45,17 +45,14 @@ setup:
body:
- index:
_index: docs
- _type: doc
_id: 1
- snapshot: one
- index:
_index: docs
- _type: doc
_id: 2
- snapshot: one
- index:
_index: docs
- _type: doc
_id: 3
- snapshot: one
@@ -93,22 +90,18 @@ setup:
body:
- index:
_index: docs
- _type: doc
_id: 4
- snapshot: two
- index:
_index: docs
- _type: doc
_id: 5
- snapshot: two
- index:
_index: docs
- _type: doc
_id: 6
- snapshot: two
- index:
_index: docs
- _type: doc
_id: 7
- snapshot: two
diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle
index 5ab7d79d6f0c5..c7e7bc5f40cce 100644
--- a/plugins/repository-gcs/build.gradle
+++ b/plugins/repository-gcs/build.gradle
@@ -61,8 +61,8 @@ dependencies {
api 'com.google.api:api-common:1.8.1'
api 'com.google.api:gax:1.54.0'
api 'org.threeten:threetenbp:1.4.4'
- api 'com.google.protobuf:protobuf-java-util:3.11.3'
- api 'com.google.protobuf:protobuf-java:3.11.3'
+ api 'com.google.protobuf:protobuf-java-util:3.19.3'
+ api 'com.google.protobuf:protobuf-java:3.19.3'
api 'com.google.code.gson:gson:2.8.9'
api 'com.google.api.grpc:proto-google-common-protos:1.16.0'
api 'com.google.api.grpc:proto-google-iam-v1:0.12.0'
@@ -165,39 +165,23 @@ thirdPartyAudit {
'org.apache.http.client.RedirectHandler',
'org.apache.http.client.RequestDirector',
'org.apache.http.client.UserTokenHandler',
- 'org.apache.http.client.methods.HttpDelete',
'org.apache.http.client.methods.HttpEntityEnclosingRequestBase',
- 'org.apache.http.client.methods.HttpGet',
- 'org.apache.http.client.methods.HttpHead',
- 'org.apache.http.client.methods.HttpOptions',
- 'org.apache.http.client.methods.HttpPost',
- 'org.apache.http.client.methods.HttpPut',
'org.apache.http.client.methods.HttpRequestBase',
- 'org.apache.http.client.methods.HttpTrace',
'org.apache.http.config.SocketConfig',
'org.apache.http.config.SocketConfig$Builder',
'org.apache.http.conn.ClientConnectionManager',
'org.apache.http.conn.ConnectionKeepAliveStrategy',
'org.apache.http.conn.params.ConnManagerParams',
- 'org.apache.http.conn.params.ConnPerRouteBean',
'org.apache.http.conn.params.ConnRouteParams',
'org.apache.http.conn.routing.HttpRoutePlanner',
'org.apache.http.conn.scheme.PlainSocketFactory',
- 'org.apache.http.conn.scheme.Scheme',
'org.apache.http.conn.scheme.SchemeRegistry',
- 'org.apache.http.conn.ssl.SSLConnectionSocketFactory',
'org.apache.http.conn.ssl.SSLSocketFactory',
'org.apache.http.conn.ssl.X509HostnameVerifier',
'org.apache.http.entity.AbstractHttpEntity',
'org.apache.http.impl.client.DefaultHttpClient',
- 'org.apache.http.impl.client.DefaultHttpRequestRetryHandler',
'org.apache.http.impl.client.HttpClientBuilder',
'org.apache.http.impl.conn.PoolingHttpClientConnectionManager',
- 'org.apache.http.impl.conn.ProxySelectorRoutePlanner',
- 'org.apache.http.impl.conn.SystemDefaultRoutePlanner',
- 'org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager',
- 'org.apache.http.message.BasicHttpResponse',
- 'org.apache.http.params.BasicHttpParams',
'org.apache.http.params.HttpConnectionParams',
'org.apache.http.params.HttpParams',
'org.apache.http.params.HttpProtocolParams',
diff --git a/plugins/repository-gcs/licenses/protobuf-java-3.11.3.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-3.11.3.jar.sha1
deleted file mode 100644
index 371f423c3751e..0000000000000
--- a/plugins/repository-gcs/licenses/protobuf-java-3.11.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-df12be70b968e32442821a2cfdc3cede5a42dec5
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/protobuf-java-3.19.3.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-3.19.3.jar.sha1
new file mode 100644
index 0000000000000..655ecd1f1c1c9
--- /dev/null
+++ b/plugins/repository-gcs/licenses/protobuf-java-3.19.3.jar.sha1
@@ -0,0 +1 @@
+4b57f1b1b9e281231c3fcfc039ce3021e29ff570
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/protobuf-java-util-3.11.3.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-util-3.11.3.jar.sha1
deleted file mode 100644
index 8f8d3cf3c9e49..0000000000000
--- a/plugins/repository-gcs/licenses/protobuf-java-util-3.11.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fd4ba2dfeb1b010eb20ca27e65fbfb74fbbdcdb9
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/protobuf-java-util-3.19.3.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-util-3.19.3.jar.sha1
new file mode 100644
index 0000000000000..9ba36d444c541
--- /dev/null
+++ b/plugins/repository-gcs/licenses/protobuf-java-util-3.19.3.jar.sha1
@@ -0,0 +1 @@
+3e6812cbbb7e6faffa7b56438740dec510e1fc1a
\ No newline at end of file
diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettings.java
index d15b00712dea4..e8700570d2801 100644
--- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettings.java
+++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettings.java
@@ -36,17 +36,23 @@
import org.opensearch.common.Strings;
import org.opensearch.common.settings.SecureSetting;
+import org.opensearch.common.settings.SecureString;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
+import org.opensearch.common.settings.SettingsException;
import org.opensearch.common.unit.TimeValue;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
+import java.net.InetAddress;
+import java.net.Proxy;
import java.net.URI;
+import java.net.UnknownHostException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
+import java.util.Locale;
import java.util.Map;
import java.util.function.Function;
@@ -114,6 +120,54 @@ public class GoogleCloudStorageClientSettings {
key -> new Setting<>(key, "repository-gcs", Function.identity(), Setting.Property.NodeScope, Setting.Property.Deprecated)
);
+ /** Proxy type */
+ static final Setting.AffixSetting<Proxy.Type> PROXY_TYPE_SETTING = Setting.affixKeySetting(
+ PREFIX,
+ "proxy.type",
+ (key) -> new Setting<Proxy.Type>(
+ key,
+ Proxy.Type.DIRECT.name(),
+ s -> Proxy.Type.valueOf(s.toUpperCase(Locale.ROOT)),
+ Setting.Property.NodeScope
+ )
+ );
+
+ /** The host of a proxy to connect */
+ static final Setting.AffixSetting<String> PROXY_HOST_SETTING = Setting.affixKeySetting(
+ PREFIX,
+ "proxy.host",
+ key -> Setting.simpleString(key, Setting.Property.NodeScope),
+ () -> PROXY_TYPE_SETTING
+ );
+
+ /** The port of a proxy to connect */
+ static final Setting.AffixSetting<Integer> PROXY_PORT_SETTING = Setting.affixKeySetting(
+ PREFIX,
+ "proxy.port",
+ key -> Setting.intSetting(key, 0, 0, (1 << 16) - 1, Setting.Property.NodeScope),
+ () -> PROXY_TYPE_SETTING,
+ () -> PROXY_HOST_SETTING
+ );
+
+ /** The username of a proxy to connect */
+ static final Setting.AffixSetting<SecureString> PROXY_USERNAME_SETTING = Setting.affixKeySetting(
+ PREFIX,
+ "proxy.username",
+ key -> SecureSetting.secureString(key, null),
+ () -> PROXY_TYPE_SETTING,
+ () -> PROXY_HOST_SETTING
+ );
+
+ /** The password of a proxy to connect */
+ static final Setting.AffixSetting<SecureString> PROXY_PASSWORD_SETTING = Setting.affixKeySetting(
+ PREFIX,
+ "proxy.password",
+ key -> SecureSetting.secureString(key, null),
+ () -> PROXY_TYPE_SETTING,
+ () -> PROXY_HOST_SETTING,
+ () -> PROXY_USERNAME_SETTING
+ );
+
/** The credentials used by the client to connect to the Storage endpoint. */
private final ServiceAccountCredentials credential;
@@ -135,6 +189,9 @@ public class GoogleCloudStorageClientSettings {
/** The token server URI. This leases access tokens in the oauth flow. */
private final URI tokenUri;
+ /** The GCS SDK Proxy settings. */
+ private final ProxySettings proxySettings;
+
GoogleCloudStorageClientSettings(
final ServiceAccountCredentials credential,
final String endpoint,
@@ -142,7 +199,8 @@ public class GoogleCloudStorageClientSettings {
final TimeValue connectTimeout,
final TimeValue readTimeout,
final String applicationName,
- final URI tokenUri
+ final URI tokenUri,
+ final ProxySettings proxySettings
) {
this.credential = credential;
this.endpoint = endpoint;
@@ -151,6 +209,7 @@ public class GoogleCloudStorageClientSettings {
this.readTimeout = readTimeout;
this.applicationName = applicationName;
this.tokenUri = tokenUri;
+ this.proxySettings = proxySettings;
}
public ServiceAccountCredentials getCredential() {
@@ -181,6 +240,10 @@ public URI getTokenUri() {
return tokenUri;
}
+ public ProxySettings getProxySettings() {
+ return proxySettings;
+ }
+
public static Map<String, GoogleCloudStorageClientSettings> load(final Settings settings) {
final Map<String, GoogleCloudStorageClientSettings> clients = new HashMap<>();
for (final String clientName : settings.getGroups(PREFIX).keySet()) {
@@ -202,10 +265,39 @@ static GoogleCloudStorageClientSettings getClientSettings(final Settings setting
getConfigValue(settings, clientName, CONNECT_TIMEOUT_SETTING),
getConfigValue(settings, clientName, READ_TIMEOUT_SETTING),
getConfigValue(settings, clientName, APPLICATION_NAME_SETTING),
- getConfigValue(settings, clientName, TOKEN_URI_SETTING)
+ getConfigValue(settings, clientName, TOKEN_URI_SETTING),
+ validateAndCreateProxySettings(settings, clientName)
);
}
+ static ProxySettings validateAndCreateProxySettings(final Settings settings, final String clientName) {
+ final Proxy.Type proxyType = getConfigValue(settings, clientName, PROXY_TYPE_SETTING);
+ final String proxyHost = getConfigValue(settings, clientName, PROXY_HOST_SETTING);
+ final int proxyPort = getConfigValue(settings, clientName, PROXY_PORT_SETTING);
+ final SecureString proxyUserName = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING);
+ final SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING);
+ // Validate proxy settings
+ if (proxyType == Proxy.Type.DIRECT
+ && (proxyPort != 0 || Strings.hasText(proxyHost) || Strings.hasText(proxyUserName) || Strings.hasText(proxyPassword))) {
+ throw new SettingsException(
+ "Google Cloud Storage proxy port or host or username or password have been set but proxy type is not defined."
+ );
+ }
+ if (proxyType != Proxy.Type.DIRECT && (proxyPort == 0 || Strings.isEmpty(proxyHost))) {
+ throw new SettingsException("Google Cloud Storage proxy type has been set but proxy host or port is not defined.");
+ }
+ if (proxyType == Proxy.Type.DIRECT) {
+ return ProxySettings.NO_PROXY_SETTINGS;
+ }
+
+ try {
+ final InetAddress proxyHostAddress = InetAddress.getByName(proxyHost);
+ return new ProxySettings(proxyType, proxyHostAddress, proxyPort, proxyUserName.toString(), proxyPassword.toString());
+ } catch (final UnknownHostException e) {
+ throw new SettingsException("Google Cloud Storage proxy host is unknown.", e);
+ }
+ }
+
/**
* Loads the service account file corresponding to a given client name. If no
* file is defined for the client, a {@code null} credential is returned.
diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStoragePlugin.java
index 7d51a6196e4c8..4908b26649b1b 100644
--- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStoragePlugin.java
+++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStoragePlugin.java
@@ -92,7 +92,12 @@ public List<Setting<?>> getSettings() {
GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING,
GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING,
GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING,
- GoogleCloudStorageClientSettings.TOKEN_URI_SETTING
+ GoogleCloudStorageClientSettings.TOKEN_URI_SETTING,
+ GoogleCloudStorageClientSettings.PROXY_TYPE_SETTING,
+ GoogleCloudStorageClientSettings.PROXY_HOST_SETTING,
+ GoogleCloudStorageClientSettings.PROXY_PORT_SETTING,
+ GoogleCloudStorageClientSettings.PROXY_USERNAME_SETTING,
+ GoogleCloudStorageClientSettings.PROXY_PASSWORD_SETTING
);
}
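A comparable sketch for the repository-gcs client, assuming the existing "gcs.client." settings prefix of GoogleCloudStorageClientSettings and a client named "default"; the host, port and credentials are illustrative only. Unlike the Azure variant, the proxy type here maps directly to java.net.Proxy.Type, so the accepted values are direct (default), http and socks.

import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.Settings;

public class GcsProxyConfigSketch {
    // Builds node settings for a GCS client routed through an authenticated HTTP proxy.
    public static Settings authenticatedHttpProxy() {
        final MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString("gcs.client.default.proxy.username", "user");
        secureSettings.setString("gcs.client.default.proxy.password", "pwd");
        return Settings.builder()
            .put("gcs.client.default.proxy.type", "http") // direct (default), http or socks
            .put("gcs.client.default.proxy.host", "127.0.0.1")
            .put("gcs.client.default.proxy.port", 8080)
            .setSecureSettings(secureSettings)
            .build();
    }
}

Per validateAndCreateProxySettings above, leaving the type at direct while setting any other proxy.* value, or choosing a non-direct type without both host and port, raises a SettingsException when the client settings are loaded.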
diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java
index 8208dcfe597ff..f4b501327d52c 100644
--- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java
+++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java
@@ -50,6 +50,9 @@
import org.opensearch.common.unit.TimeValue;
import java.io.IOException;
+import java.net.Authenticator;
+import java.net.PasswordAuthentication;
+import java.net.Proxy;
import java.net.URI;
import java.util.Map;
@@ -142,13 +145,7 @@ synchronized void closeRepositoryClient(String repositoryName) {
*/
private Storage createClient(GoogleCloudStorageClientSettings clientSettings, GoogleCloudStorageOperationsStats stats)
throws IOException {
- final HttpTransport httpTransport = SocketAccess.doPrivilegedIOException(() -> {
- final NetHttpTransport.Builder builder = new NetHttpTransport.Builder();
- // requires java.lang.RuntimePermission "setFactory"
- // Pin the TLS trust certificates.
- builder.trustCertificates(GoogleUtils.getCertificateTrustStore());
- return builder.build();
- });
+ final HttpTransport httpTransport = createHttpTransport(clientSettings);
final GoogleCloudStorageHttpStatsCollector httpStatsCollector = new GoogleCloudStorageHttpStatsCollector(stats);
@@ -175,6 +172,28 @@ public HttpRequestInitializer getHttpRequestInitializer(ServiceOptions<?, ?> ser
return storageOptions.getService();
}
+ private HttpTransport createHttpTransport(final GoogleCloudStorageClientSettings clientSettings) throws IOException {
+ return SocketAccess.doPrivilegedIOException(() -> {
+ final NetHttpTransport.Builder builder = new NetHttpTransport.Builder();
+ // requires java.lang.RuntimePermission "setFactory"
+ // Pin the TLS trust certificates.
+ builder.trustCertificates(GoogleUtils.getCertificateTrustStore());
+ final ProxySettings proxySettings = clientSettings.getProxySettings();
+ if (proxySettings != ProxySettings.NO_PROXY_SETTINGS) {
+ if (proxySettings.isAuthenticated()) {
+ Authenticator.setDefault(new Authenticator() {
+ @Override
+ protected PasswordAuthentication getPasswordAuthentication() {
+ return new PasswordAuthentication(proxySettings.getUsername(), proxySettings.getPassword().toCharArray());
+ }
+ });
+ }
+ builder.setProxy(new Proxy(proxySettings.getType(), proxySettings.getAddress()));
+ }
+ return builder.build();
+ });
+ }
+
StorageOptions createStorageOptions(
final GoogleCloudStorageClientSettings clientSettings,
final HttpTransportOptions httpTransportOptions
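The hunk above only wires the proxy into the Google NetHttpTransport builder. As a plain-JDK illustration of the same two steps (a process-wide Authenticator plus a java.net.Proxy applied to the connection); the endpoint, host, port, and credentials are placeholders:

import java.net.Authenticator;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.PasswordAuthentication;
import java.net.Proxy;
import java.net.URL;

public class ProxyTransportSketch {
    public static void main(String[] args) throws Exception {
        // Step 1: JVM-wide credentials provider, mirroring the Authenticator.setDefault(...) call above.
        Authenticator.setDefault(new Authenticator() {
            @Override
            protected PasswordAuthentication getPasswordAuthentication() {
                return new PasswordAuthentication("proxy-user", "proxy-pass".toCharArray());
            }
        });

        // Step 2: route the request through the proxy, as the transport does once builder.setProxy(...) is set.
        final Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("127.0.0.1", 8080));
        final HttpURLConnection connection =
            (HttpURLConnection) new URL("https://storage.googleapis.com").openConnection(proxy);
        System.out.println("HTTP " + connection.getResponseCode());
    }
}

Because Authenticator.setDefault is process-wide, the security policy change further down grants the matching NetPermission.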
diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/ProxySettings.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/ProxySettings.java
new file mode 100644
index 0000000000000..ddc6446d2c8c5
--- /dev/null
+++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/ProxySettings.java
@@ -0,0 +1,80 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.repositories.gcs;
+
+import org.opensearch.common.Strings;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.Proxy;
+import java.util.Objects;
+
+public class ProxySettings {
+
+ public static final ProxySettings NO_PROXY_SETTINGS = new ProxySettings(Proxy.Type.DIRECT, null, -1, null, null);
+
+ private final Proxy.Type type;
+
+ private final InetAddress host;
+
+ private final String username;
+
+ private final String password;
+
+ private final int port;
+
+ public ProxySettings(final Proxy.Type type, final InetAddress host, final int port, final String username, final String password) {
+ this.type = type;
+ this.host = host;
+ this.port = port;
+ this.username = username;
+ this.password = password;
+ }
+
+ public Proxy.Type getType() {
+ return this.type;
+ }
+
+ public InetSocketAddress getAddress() {
+ return new InetSocketAddress(host, port);
+ }
+
+ public String getUsername() {
+ return this.username;
+ }
+
+ public String getPassword() {
+ return this.password;
+ }
+
+ public boolean isAuthenticated() {
+ return Strings.isNullOrEmpty(username) == false && Strings.isNullOrEmpty(password) == false;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ final ProxySettings that = (ProxySettings) o;
+ return port == that.port
+ && type == that.type
+ && Objects.equals(host, that.host)
+ && Objects.equals(username, that.username)
+ && Objects.equals(password, that.password);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(type, host, username, password, port);
+ }
+}
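A short sketch of how this value object is consumed; the address and credentials are placeholders. Note that createHttpTransport above compares against NO_PROXY_SETTINGS by reference, which works because the sentinel is a single shared instance, while equals/hashCode mainly serve the tests.

package org.opensearch.repositories.gcs;

import java.net.InetAddress;
import java.net.Proxy;

public class ProxySettingsUsageSketch {
    public static void main(String[] args) throws Exception {
        final ProxySettings proxySettings =
            new ProxySettings(Proxy.Type.SOCKS, InetAddress.getByName("127.0.0.1"), 1080, "user", "secret");

        // Both username and password are non-empty, so the transport would install an Authenticator.
        assert proxySettings.isAuthenticated();

        // Same conversion the service performs when building the HTTP transport.
        final Proxy proxy = new Proxy(proxySettings.getType(), proxySettings.getAddress());
        System.out.println(proxy);
    }
}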
diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy
index a6e2299f52f33..48af969b04dc3 100644
--- a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy
+++ b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy
@@ -40,4 +40,7 @@ grant {
// gcs client opens socket connections for to access repository
permission java.net.SocketPermission "*", "connect";
+
+ // gcs client set Authenticator for proxy username/password
+ permission java.net.NetPermission "setDefaultAuthenticator";
};
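An illustration of why the extra grant is needed: under a SecurityManager, Authenticator.setDefault performs a check equivalent to the one below before installing the authenticator (a sketch of the JDK behavior, not plugin code).

import java.net.NetPermission;

public class AuthenticatorPermissionSketch {
    public static void main(String[] args) {
        // Without the policy grant above this check fails with AccessControlException
        // whenever a SecurityManager is installed.
        final SecurityManager securityManager = System.getSecurityManager();
        if (securityManager != null) {
            securityManager.checkPermission(new NetPermission("setDefaultAuthenticator"));
        }
    }
}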
diff --git a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java
index 8dbf6b0ff2873..abf63e5525d4d 100644
--- a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java
+++ b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java
@@ -38,9 +38,13 @@
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
+import org.opensearch.common.settings.SettingsException;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.test.OpenSearchTestCase;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.Proxy;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.security.KeyPair;
@@ -92,6 +96,7 @@ public void testLoad() throws Exception {
assertEquals(expectedClientSettings.getConnectTimeout(), actualClientSettings.getConnectTimeout());
assertEquals(expectedClientSettings.getReadTimeout(), actualClientSettings.getReadTimeout());
assertEquals(expectedClientSettings.getApplicationName(), actualClientSettings.getApplicationName());
+ assertEquals(ProxySettings.NO_PROXY_SETTINGS, actualClientSettings.getProxySettings());
}
if (deprecationWarnings.isEmpty() == false) {
@@ -118,11 +123,131 @@ public void testProjectIdDefaultsToCredentials() throws Exception {
CONNECT_TIMEOUT_SETTING.getDefault(Settings.EMPTY),
READ_TIMEOUT_SETTING.getDefault(Settings.EMPTY),
APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY),
- new URI("")
+ new URI(""),
+ new ProxySettings(Proxy.Type.DIRECT, null, 0, null, null)
);
assertEquals(credential.getProjectId(), googleCloudStorageClientSettings.getProjectId());
}
+ public void testHttpProxySettings() throws Exception {
+ final int port = randomIntBetween(10, 1080);
+ final String userName = randomAlphaOfLength(10);
+ final String password = randomAlphaOfLength(10);
+ final GoogleCloudStorageClientSettings gcsWithHttpProxyWithoutUserPwd = proxyGoogleCloudStorageClientSettings(
+ new ProxySettings(Proxy.Type.HTTP, InetAddress.getByName("127.0.0.10"), port, null, null)
+ );
+
+ assertEquals(Proxy.Type.HTTP, gcsWithHttpProxyWithoutUserPwd.getProxySettings().getType());
+ assertEquals(
+ new InetSocketAddress(InetAddress.getByName("127.0.0.10"), port),
+ gcsWithHttpProxyWithoutUserPwd.getProxySettings().getAddress()
+ );
+ assertNull(gcsWithHttpProxyWithoutUserPwd.getProxySettings().getUsername());
+ assertNull(gcsWithHttpProxyWithoutUserPwd.getProxySettings().getPassword());
+ assertFalse(gcsWithHttpProxyWithoutUserPwd.getProxySettings().isAuthenticated());
+
+ final GoogleCloudStorageClientSettings gcsWithHttpProxyWithUserPwd = proxyGoogleCloudStorageClientSettings(
+ new ProxySettings(Proxy.Type.HTTP, InetAddress.getByName("127.0.0.10"), port, userName, password)
+ );
+
+ assertEquals(Proxy.Type.HTTP, gcsWithHttpProxyWithoutUserPwd.getProxySettings().getType());
+ assertEquals(
+ new InetSocketAddress(InetAddress.getByName("127.0.0.10"), port),
+ gcsWithHttpProxyWithUserPwd.getProxySettings().getAddress()
+ );
+ assertTrue(gcsWithHttpProxyWithUserPwd.getProxySettings().isAuthenticated());
+ assertEquals(userName, gcsWithHttpProxyWithUserPwd.getProxySettings().getUsername());
+ assertEquals(password, gcsWithHttpProxyWithUserPwd.getProxySettings().getPassword());
+ }
+
+ public void testSocksProxySettings() throws Exception {
+ final int port = randomIntBetween(10, 1080);
+ final String userName = randomAlphaOfLength(10);
+ final String password = randomAlphaOfLength(10);
+ final GoogleCloudStorageClientSettings gcsWithHttpProxyWithoutUserPwd = proxyGoogleCloudStorageClientSettings(
+ new ProxySettings(Proxy.Type.SOCKS, InetAddress.getByName("127.0.0.10"), port, null, null)
+ );
+
+ assertEquals(Proxy.Type.SOCKS, gcsWithHttpProxyWithoutUserPwd.getProxySettings().getType());
+ assertEquals(
+ new InetSocketAddress(InetAddress.getByName("127.0.0.10"), port),
+ gcsWithHttpProxyWithoutUserPwd.getProxySettings().getAddress()
+ );
+ assertFalse(gcsWithHttpProxyWithoutUserPwd.getProxySettings().isAuthenticated());
+ assertNull(gcsWithHttpProxyWithoutUserPwd.getProxySettings().getUsername());
+ assertNull(gcsWithHttpProxyWithoutUserPwd.getProxySettings().getPassword());
+
+ final GoogleCloudStorageClientSettings gcsWithHttpProxyWithUserPwd = proxyGoogleCloudStorageClientSettings(
+ new ProxySettings(Proxy.Type.SOCKS, InetAddress.getByName("127.0.0.10"), port, userName, password)
+ );
+
+ assertEquals(Proxy.Type.SOCKS, gcsWithHttpProxyWithoutUserPwd.getProxySettings().getType());
+ assertEquals(
+ new InetSocketAddress(InetAddress.getByName("127.0.0.10"), port),
+ gcsWithHttpProxyWithUserPwd.getProxySettings().getAddress()
+ );
+ assertTrue(gcsWithHttpProxyWithUserPwd.getProxySettings().isAuthenticated());
+ assertEquals(userName, gcsWithHttpProxyWithUserPwd.getProxySettings().getUsername());
+ assertEquals(password, gcsWithHttpProxyWithUserPwd.getProxySettings().getPassword());
+ }
+
+ public void testProxyWrongHost() {
+ final Settings settings = Settings.builder()
+ .put("gcs.client.default.proxy.type", randomFrom("socks", "http"))
+ .put("gcs.client.default.proxy.host", "thisisnotavalidhostorwehavebeensuperunlucky")
+ .put("gcs.client.default.proxy.port", 8080)
+ .build();
+ final SettingsException e = expectThrows(SettingsException.class, () -> GoogleCloudStorageClientSettings.load(settings));
+ assertEquals("Google Cloud Storage proxy host is unknown.", e.getMessage());
+ }
+
+ public void testProxyTypeNotSet() {
+ final Settings hostPortSettings = Settings.builder()
+ .put("gcs.client.default.proxy.host", "127.0.0.1")
+ .put("gcs.client.default.proxy.port", 8080)
+ .build();
+
+ SettingsException e = expectThrows(SettingsException.class, () -> GoogleCloudStorageClientSettings.load(hostPortSettings));
+ assertEquals(
+ "Google Cloud Storage proxy port or host or username or password have been set but proxy type is not defined.",
+ e.getMessage()
+ );
+
+ final MockSecureSettings secureSettings = new MockSecureSettings();
+ secureSettings.setString("gcs.client.default.proxy.username", "aaaa");
+ secureSettings.setString("gcs.client.default.proxy.password", "bbbb");
+ final Settings usernamePasswordSettings = Settings.builder().setSecureSettings(secureSettings).build();
+
+ e = expectThrows(SettingsException.class, () -> GoogleCloudStorageClientSettings.load(usernamePasswordSettings));
+ assertEquals(
+ "Google Cloud Storage proxy port or host or username or password have been set but proxy type is not defined.",
+ e.getMessage()
+ );
+ }
+
+ public void testProxyHostNotSet() {
+ final Settings settings = Settings.builder()
+ .put("gcs.client.default.proxy.port", 8080)
+ .put("gcs.client.default.proxy.type", randomFrom("socks", "http"))
+ .build();
+ final SettingsException e = expectThrows(SettingsException.class, () -> GoogleCloudStorageClientSettings.load(settings));
+ assertEquals("Google Cloud Storage proxy type has been set but proxy host or port is not defined.", e.getMessage());
+ }
+
+ private GoogleCloudStorageClientSettings proxyGoogleCloudStorageClientSettings(final ProxySettings proxySettings) throws Exception {
+ final String clientName = randomAlphaOfLength(5);
+ return new GoogleCloudStorageClientSettings(
+ randomCredential(clientName).v1(),
+ ENDPOINT_SETTING.getDefault(Settings.EMPTY),
+ PROJECT_ID_SETTING.getDefault(Settings.EMPTY),
+ CONNECT_TIMEOUT_SETTING.getDefault(Settings.EMPTY),
+ READ_TIMEOUT_SETTING.getDefault(Settings.EMPTY),
+ APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY),
+ new URI(""),
+ proxySettings
+ );
+ }
+
/** Generates a given number of GoogleCloudStorageClientSettings along with the Settings to build them from **/
private Tuple<List<GoogleCloudStorageClientSettings>, Settings> randomClients(
final int nbClients,
@@ -216,7 +341,8 @@ private static GoogleCloudStorageClientSettings randomClient(
connectTimeout,
readTimeout,
applicationName,
- new URI("")
+ new URI(""),
+ new ProxySettings(Proxy.Type.DIRECT, null, 0, null, null)
);
}
diff --git a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java
index 7792a5f51c459..c5a3a26be082f 100644
--- a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java
+++ b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java
@@ -35,7 +35,7 @@
import com.google.auth.Credentials;
import com.google.cloud.http.HttpTransportOptions;
import com.google.cloud.storage.Storage;
-
+import org.hamcrest.Matchers;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.Setting;
@@ -43,7 +43,6 @@
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.test.OpenSearchTestCase;
-import org.hamcrest.Matchers;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
@@ -51,9 +50,9 @@
import java.util.Locale;
import java.util.UUID;
-import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
public class GoogleCloudStorageServiceTests extends OpenSearchTestCase {
diff --git a/plugins/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml b/plugins/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml
index dfd0ecc5788b1..f087a004efdf2 100644
--- a/plugins/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml
+++ b/plugins/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml
@@ -48,17 +48,14 @@ setup:
body:
- index:
_index: docs
- _type: doc
_id: 1
- snapshot: one
- index:
_index: docs
- _type: doc
_id: 2
- snapshot: one
- index:
_index: docs
- _type: doc
_id: 3
- snapshot: one
@@ -96,22 +93,18 @@ setup:
body:
- index:
_index: docs
- _type: doc
_id: 4
- snapshot: two
- index:
_index: docs
- _type: doc
_id: 5
- snapshot: two
- index:
_index: docs
- _type: doc
_id: 6
- snapshot: two
- index:
_index: docs
- _type: doc
_id: 7
- snapshot: two
diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle
index 9688835d0853f..6d2966faa59cf 100644
--- a/plugins/repository-hdfs/build.gradle
+++ b/plugins/repository-hdfs/build.gradle
@@ -67,14 +67,14 @@ dependencies {
api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
api 'com.google.code.gson:gson:2.8.9'
runtimeOnly 'com.google.guava:guava:30.1.1-jre'
- api 'com.google.protobuf:protobuf-java:2.5.0'
+ api 'com.google.protobuf:protobuf-java:3.19.3'
api 'commons-logging:commons-logging:1.1.3'
api 'commons-cli:commons-cli:1.2'
api "commons-codec:commons-codec:${versions.commonscodec}"
api 'commons-collections:commons-collections:3.2.2'
api 'org.apache.commons:commons-compress:1.21'
api 'org.apache.commons:commons-configuration2:2.7'
- api 'commons-io:commons-io:2.7'
+ api 'commons-io:commons-io:2.11.0'
api 'org.apache.commons:commons-lang3:3.7'
implementation 'com.google.re2j:re2j:1.1'
api 'javax.servlet:servlet-api:2.5'
@@ -83,7 +83,7 @@ dependencies {
api 'net.minidev:json-smart:2.4.7'
api 'org.apache.zookeeper:zookeeper:3.7.0'
api "io.netty:netty-all:${versions.netty}"
- implementation 'com.fasterxml.woodstox:woodstox-core:6.1.1'
+ implementation 'com.fasterxml.woodstox:woodstox-core:6.2.8'
implementation 'org.codehaus.woodstox:stax2-api:4.2.1'
hdfsFixture project(':test:fixtures:hdfs-fixture')
@@ -113,6 +113,19 @@ tasks.named("dependencyLicenses").configure {
mapping from: /hadoop-.*/, to: 'hadoop'
}
+thirdPartyAudit {
+ ignoreViolations(
+ // uses internal java api: sun.misc.Unsafe
+ 'com.google.protobuf.MessageSchema',
+ 'com.google.protobuf.UnsafeUtil',
+ 'com.google.protobuf.UnsafeUtil$1',
+ 'com.google.protobuf.UnsafeUtil$Android32MemoryAccessor',
+ 'com.google.protobuf.UnsafeUtil$Android64MemoryAccessor',
+ 'com.google.protobuf.UnsafeUtil$JvmMemoryAccessor',
+ 'com.google.protobuf.UnsafeUtil$MemoryAccessor'
+ )
+}
+
tasks.named("integTest").configure {
it.dependsOn(project.tasks.named("bundlePlugin"))
}
@@ -235,7 +248,7 @@ for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSec
)
}
}
-
+
if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
jvmArgs += ["--add-opens", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"]
}
diff --git a/plugins/repository-hdfs/licenses/commons-io-2.11.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.11.0.jar.sha1
new file mode 100644
index 0000000000000..8adec30bade49
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/commons-io-2.11.0.jar.sha1
@@ -0,0 +1 @@
+a2503f302b11ebde7ebc3df41daebe0e4eea3689
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/commons-io-2.7.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.7.jar.sha1
deleted file mode 100644
index bbb1b15dd1e1e..0000000000000
--- a/plugins/repository-hdfs/licenses/commons-io-2.7.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3f2bd4ba11c4162733c13cc90ca7c7ea09967102
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/jackson-databind-2.12.5.jar.sha1 b/plugins/repository-hdfs/licenses/jackson-databind-2.12.5.jar.sha1
deleted file mode 100644
index ca1bd46bc3cd3..0000000000000
--- a/plugins/repository-hdfs/licenses/jackson-databind-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b064cf057f23d3d35390328c5030847efeffedde
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/jackson-databind-2.12.6.jar.sha1 b/plugins/repository-hdfs/licenses/jackson-databind-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..f74842887d31b
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/jackson-databind-2.12.6.jar.sha1
@@ -0,0 +1 @@
+fac216b606c1086e36acea6e572ee61572ad1670
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.72.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.72.Final.jar.sha1
deleted file mode 100644
index dc2119726f690..0000000000000
--- a/plugins/repository-hdfs/licenses/netty-all-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-86a69bf2f38d9f9f05c528e158f1532d3c6d625e
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.73.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..52d6f22e73013
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/netty-all-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+75c5a0ddb28adcc9e4991c75678d4a85dfe4a0b3
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/protobuf-java-2.5.0.jar.sha1 b/plugins/repository-hdfs/licenses/protobuf-java-2.5.0.jar.sha1
deleted file mode 100644
index 71f918819e2b6..0000000000000
--- a/plugins/repository-hdfs/licenses/protobuf-java-2.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a10732c76bfacdbd633a7eb0f7968b1059a65dfa
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/protobuf-java-3.19.3.jar.sha1 b/plugins/repository-hdfs/licenses/protobuf-java-3.19.3.jar.sha1
new file mode 100644
index 0000000000000..655ecd1f1c1c9
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/protobuf-java-3.19.3.jar.sha1
@@ -0,0 +1 @@
+4b57f1b1b9e281231c3fcfc039ce3021e29ff570
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/woodstox-core-6.1.1.jar.sha1 b/plugins/repository-hdfs/licenses/woodstox-core-6.1.1.jar.sha1
deleted file mode 100644
index f2ad1c80882d3..0000000000000
--- a/plugins/repository-hdfs/licenses/woodstox-core-6.1.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-989bb31963ed1758b95c7c4381a91592a9a8df61
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/woodstox-core-6.2.8.jar.sha1 b/plugins/repository-hdfs/licenses/woodstox-core-6.2.8.jar.sha1
new file mode 100644
index 0000000000000..ae65cdebf26de
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/woodstox-core-6.2.8.jar.sha1
@@ -0,0 +1 @@
+670748292899c53b1963730d9eb7f8ab71314e90
\ No newline at end of file
diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle
index 1ebd6c5c50ffe..21ad7b6dd54c1 100644
--- a/plugins/repository-s3/build.gradle
+++ b/plugins/repository-s3/build.gradle
@@ -65,7 +65,7 @@ dependencies {
// HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here,
// and whitelist this hack in JarHell
- api 'javax.xml.bind:jaxb-api:2.2.2'
+ api 'javax.xml.bind:jaxb-api:2.3.1'
testImplementation project(':test:fixtures:s3-fixture')
}
diff --git a/plugins/repository-s3/licenses/jackson-annotations-2.12.5.jar.sha1 b/plugins/repository-s3/licenses/jackson-annotations-2.12.5.jar.sha1
deleted file mode 100644
index 797bcf2b161d4..0000000000000
--- a/plugins/repository-s3/licenses/jackson-annotations-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-52d929d5bb21d0186fe24c09624cc3ee4bafc3b3
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jackson-annotations-2.12.6.jar.sha1 b/plugins/repository-s3/licenses/jackson-annotations-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..48ee3bf53c630
--- /dev/null
+++ b/plugins/repository-s3/licenses/jackson-annotations-2.12.6.jar.sha1
@@ -0,0 +1 @@
+9487231edd6b0b1f14692c9cba9e0462809215d1
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jackson-databind-2.12.5.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.12.5.jar.sha1
deleted file mode 100644
index ca1bd46bc3cd3..0000000000000
--- a/plugins/repository-s3/licenses/jackson-databind-2.12.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b064cf057f23d3d35390328c5030847efeffedde
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jackson-databind-2.12.6.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.12.6.jar.sha1
new file mode 100644
index 0000000000000..f74842887d31b
--- /dev/null
+++ b/plugins/repository-s3/licenses/jackson-databind-2.12.6.jar.sha1
@@ -0,0 +1 @@
+fac216b606c1086e36acea6e572ee61572ad1670
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jaxb-api-2.2.2.jar.sha1 b/plugins/repository-s3/licenses/jaxb-api-2.2.2.jar.sha1
deleted file mode 100644
index a37e187238933..0000000000000
--- a/plugins/repository-s3/licenses/jaxb-api-2.2.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-aeb3021ca93dde265796d82015beecdcff95bf09
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jaxb-api-2.3.1.jar.sha1 b/plugins/repository-s3/licenses/jaxb-api-2.3.1.jar.sha1
new file mode 100644
index 0000000000000..f4434214e1eec
--- /dev/null
+++ b/plugins/repository-s3/licenses/jaxb-api-2.3.1.jar.sha1
@@ -0,0 +1 @@
+8531ad5ac454cc2deb9d4d32c40c4d7451939b5d
\ No newline at end of file
diff --git a/plugins/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml b/plugins/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml
index 0ec3d272ee02d..a0c2d2e593a47 100644
--- a/plugins/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml
+++ b/plugins/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml
@@ -131,17 +131,14 @@ setup:
body:
- index:
_index: docs
- _type: doc
_id: 1
- snapshot: one
- index:
_index: docs
- _type: doc
_id: 2
- snapshot: one
- index:
_index: docs
- _type: doc
_id: 3
- snapshot: one
diff --git a/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml b/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml
index 09e59c7fc9d9a..fbbdcb8f153e0 100644
--- a/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml
+++ b/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml
@@ -19,7 +19,6 @@
id: 1
- match: { _index: smb-test }
- - match: { _type: _doc }
- match: { _id: "1"}
- match: { _version: 1}
- match: { _source: { foo: bar }}
diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle
index 6e309e109247b..88355cdf22728 100644
--- a/plugins/transport-nio/build.gradle
+++ b/plugins/transport-nio/build.gradle
@@ -82,10 +82,7 @@ thirdPartyAudit {
// from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty)
'org.bouncycastle.cert.X509v3CertificateBuilder',
'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter',
- 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder',
- 'org.bouncycastle.jce.provider.BouncyCastleProvider',
'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder',
- 'org.bouncycastle.asn1.x500.X500Name',
// from io.netty.handler.ssl.JettyNpnSslEngine (netty)
'org.eclipse.jetty.npn.NextProtoNego$ClientProvider',
diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.72.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.72.Final.jar.sha1
deleted file mode 100644
index f1398e52d8c74..0000000000000
--- a/plugins/transport-nio/licenses/netty-buffer-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f306eec3f79541f9b8af9c471a0d5b63b7996272
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.73.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..e5833785ebb7e
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-buffer-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+244a569c9aae973f6f485ac9801d79c1eca36daa
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.72.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.72.Final.jar.sha1
deleted file mode 100644
index f70b5c0909d7c..0000000000000
--- a/plugins/transport-nio/licenses/netty-codec-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-613c4019d687db4e9a5532564e442f83c4474ed7
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.73.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..dcdc1e4e58afe
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-codec-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+9496a30a349863a4c6fa10d5c36b4f3b495d3a31
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.72.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.72.Final.jar.sha1
deleted file mode 100644
index 8c7611afca886..0000000000000
--- a/plugins/transport-nio/licenses/netty-codec-http-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a8f062d67303a5e4b2bc2ad48fb4fd8c99108e45
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.73.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..374cfb98614d5
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+1ceeac4429b9bd517dc05e376a144bbe6b6bd038
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-common-4.1.72.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.72.Final.jar.sha1
deleted file mode 100644
index bfdf4a5cf8585..0000000000000
--- a/plugins/transport-nio/licenses/netty-common-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a55bac9c3af5f59828207b551a96ac19bbfc341e
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-common-4.1.73.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..e80a6e2569d81
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-common-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+27731b58d741b6faa6a00fa3285e7a55cc47be01
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.72.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.72.Final.jar.sha1
deleted file mode 100644
index d6cc1771a2964..0000000000000
--- a/plugins/transport-nio/licenses/netty-handler-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9feee089fee606c64be90c0332db9aef1f7d8e46
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.73.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..0e227997874bf
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-handler-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+1a2231c0074f88254865c3769a4b5842939ea04d
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.72.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.72.Final.jar.sha1
deleted file mode 100644
index d08a6f6e7e42d..0000000000000
--- a/plugins/transport-nio/licenses/netty-resolver-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4ff458458ea32ed1156086820b624a815fcbf2c0
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.73.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..ba24531724fb5
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-resolver-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+bfe83710f0c1739019613e81a06101020ca65def
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.72.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.72.Final.jar.sha1
deleted file mode 100644
index 603f145303012..0000000000000
--- a/plugins/transport-nio/licenses/netty-transport-4.1.72.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-99138b436a584879355aca8fe3c64b46227d5d79
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.73.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.73.Final.jar.sha1
new file mode 100644
index 0000000000000..6a8647497f210
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-transport-4.1.73.Final.jar.sha1
@@ -0,0 +1 @@
+abb155ddff196ccedfe85b810d4b9375ef85fcfa
\ No newline at end of file
diff --git a/plugins/transport-nio/src/main/java/org/opensearch/http/nio/HttpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/opensearch/http/nio/HttpReadWriteHandler.java
index 1d705bce64852..561695c06effe 100644
--- a/plugins/transport-nio/src/main/java/org/opensearch/http/nio/HttpReadWriteHandler.java
+++ b/plugins/transport-nio/src/main/java/org/opensearch/http/nio/HttpReadWriteHandler.java
@@ -199,8 +199,8 @@ private static boolean assertMessageTypes(Object message) {
+ ". Found type: "
+ message.getClass()
+ ".";
- assert ((HttpPipelinedResponse) message)
- .getDelegateRequest() instanceof NioHttpResponse : "This channel only pipelined responses with a delegate of type: "
+ assert ((HttpPipelinedResponse) message).getDelegateRequest() instanceof NioHttpResponse
+ : "This channel only pipelined responses with a delegate of type: "
+ NioHttpResponse.class
+ ". Found type: "
+ ((HttpPipelinedResponse) message).getDelegateRequest().getClass()
diff --git a/qa/evil-tests/src/test/java/org/opensearch/cluster/routing/EvilSystemPropertyTests.java b/qa/evil-tests/src/test/java/org/opensearch/cluster/routing/EvilSystemPropertyTests.java
deleted file mode 100644
index a53683e38f8a9..0000000000000
--- a/qa/evil-tests/src/test/java/org/opensearch/cluster/routing/EvilSystemPropertyTests.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.cluster.routing;
-
-import org.opensearch.common.SuppressForbidden;
-import org.opensearch.common.settings.ClusterSettings;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.test.OpenSearchTestCase;
-
-import static org.hamcrest.Matchers.equalTo;
-
-public class EvilSystemPropertyTests extends OpenSearchTestCase {
-
- @SuppressForbidden(reason = "manipulates system properties for testing")
- public void testDisableSearchAllocationAwareness() {
- Settings indexSettings = Settings.builder()
- .put("cluster.routing.allocation.awareness.attributes", "test")
- .build();
- OperationRouting routing = new OperationRouting(indexSettings,
- new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
- assertWarnings(OperationRouting.IGNORE_AWARENESS_ATTRIBUTES_DEPRECATION_MESSAGE);
- assertThat(routing.getAwarenessAttributes().size(), equalTo(1));
- assertThat(routing.getAwarenessAttributes().get(0), equalTo("test"));
- System.setProperty("opensearch.search.ignore_awareness_attributes", "true");
- try {
- routing = new OperationRouting(indexSettings,
- new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
- assertTrue(routing.getAwarenessAttributes().isEmpty());
- } finally {
- System.clearProperty("opensearch.search.ignore_awareness_attributes");
- }
-
- }
-}
diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java
index 3c6232569f3f1..629e325427162 100644
--- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java
+++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java
@@ -49,10 +49,6 @@
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.common.xcontent.support.XContentMapValues;
import org.opensearch.index.IndexSettings;
-import org.opensearch.rest.action.document.RestBulkAction;
-import org.opensearch.rest.action.document.RestIndexAction;
-import org.opensearch.rest.action.document.RestUpdateAction;
-import org.opensearch.rest.action.search.RestExplainAction;
import org.opensearch.test.NotEqualMessageBuilder;
import org.opensearch.test.XContentTestUtils;
import org.opensearch.test.rest.OpenSearchRestTestCase;
@@ -97,6 +93,7 @@
* with {@code tests.is_old_cluster} set to {@code false}.
*/
public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
+
private String index;
private String type;
@@ -163,6 +160,7 @@ public void testSearch() throws Exception {
count,
true,
true,
+ randomBoolean(),
i -> JsonXContent.contentBuilder().startObject()
.field("string", randomAlphaOfLength(10))
.field("int", randomInt(100))
@@ -182,7 +180,7 @@ public void testSearch() throws Exception {
assertBasicSearchWorks(count);
assertAllSearchWorks(count);
assertBasicAggregationWorks();
- assertRealtimeGetWorks(type);
+ assertRealtimeGetWorks();
assertStoredBinaryFields(count);
}
@@ -198,9 +196,6 @@ public void testNewReplicasWork() throws Exception {
}
{
mappingsAndSettings.startObject("mappings");
- if (isRunningAgainstAncientCluster()) {
- mappingsAndSettings.startObject(type);
- }
mappingsAndSettings.startObject("properties");
{
mappingsAndSettings.startObject("field");
@@ -208,21 +203,17 @@ public void testNewReplicasWork() throws Exception {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
- if (isRunningAgainstAncientCluster()) {
- mappingsAndSettings.endObject();
- }
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
- createIndex.setOptions(allowTypesRemovalWarnings());
client().performRequest(createIndex);
int numDocs = randomIntBetween(2000, 3000);
indexRandomDocuments(
- numDocs, true, false, i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject());
+ numDocs, true, false, randomBoolean(), i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject());
logger.info("Refreshing [{}]", index);
client().performRequest(new Request("POST", "/" + index + "/_refresh"));
} else {
@@ -304,9 +295,6 @@ public void testShrink() throws IOException {
{
mappingsAndSettings.startObject("mappings");
{
- if (isRunningAgainstAncientCluster()) {
- mappingsAndSettings.startObject(type);
- }
mappingsAndSettings.startObject("properties");
{
mappingsAndSettings.startObject("field");
@@ -316,30 +304,23 @@ public void testShrink() throws IOException {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
- if (isRunningAgainstAncientCluster()) {
- mappingsAndSettings.endObject();
- }
}
mappingsAndSettings.endObject();
- if (isRunningAgainstAncientCluster() == false) {
- // the default number of shards is now one so we have to set the number of shards to be more than one explicitly
- mappingsAndSettings.startObject("settings");
- {
- mappingsAndSettings.field("index.number_of_shards", 5);
- }
- mappingsAndSettings.endObject();
+ mappingsAndSettings.startObject("settings");
+ {
+ mappingsAndSettings.field("index.number_of_shards", 5);
}
+ mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
- createIndex.setOptions(allowTypesRemovalWarnings());
client().performRequest(createIndex);
numDocs = randomIntBetween(512, 1024);
indexRandomDocuments(
- numDocs, true, true, i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject());
+ numDocs, true, true, randomBoolean(), i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject());
ensureGreen(index); // wait for source index to be available on both nodes before starting shrink
@@ -387,9 +368,6 @@ public void testShrinkAfterUpgrade() throws IOException {
{
mappingsAndSettings.startObject("mappings");
{
- if (isRunningAgainstAncientCluster()) {
- mappingsAndSettings.startObject(type);
- }
mappingsAndSettings.startObject("properties");
{
mappingsAndSettings.startObject("field");
@@ -399,23 +377,17 @@ public void testShrinkAfterUpgrade() throws IOException {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
- if (isRunningAgainstAncientCluster()) {
- mappingsAndSettings.endObject();
- }
}
mappingsAndSettings.endObject();
- if (isRunningAgainstAncientCluster() == false) {
- // the default number of shards is now one so we have to set the number of shards to be more than one explicitly
- mappingsAndSettings.startObject("settings");
- mappingsAndSettings.field("index.number_of_shards", 5);
- mappingsAndSettings.endObject();
- }
+ // the default number of shards is now one so we have to set the number of shards to be more than one explicitly
+ mappingsAndSettings.startObject("settings");
+ mappingsAndSettings.field("index.number_of_shards", 5);
+ mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
- createIndex.setOptions(allowTypesRemovalWarnings());
client().performRequest(createIndex);
numDocs = randomIntBetween(512, 1024);
@@ -423,6 +395,7 @@ public void testShrinkAfterUpgrade() throws IOException {
numDocs,
true,
true,
+ randomBoolean(),
i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject()
);
} else {
@@ -491,15 +464,13 @@ public void testRollover() throws IOException {
bulk.append("{\"index\":{}}\n");
bulk.append("{\"test\":\"test\"}\n");
}
- Request bulkRequest = new Request("POST", "/" + index + "_write/" + type + "/_bulk");
+ Request bulkRequest = new Request("POST", "/" + index + "_write/_bulk");
bulkRequest.setJsonEntity(bulk.toString());
bulkRequest.addParameter("refresh", "");
- bulkRequest.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
assertThat(EntityUtils.toString(client().performRequest(bulkRequest).getEntity()), containsString("\"errors\":false"));
if (isRunningAgainstOldCluster()) {
Request rolloverRequest = new Request("POST", "/" + index + "_write/_rollover");
- rolloverRequest.setOptions(allowTypesRemovalWarnings());
rolloverRequest.setJsonEntity("{"
+ " \"conditions\": {"
+ " \"max_docs\": 5"
@@ -569,12 +540,10 @@ void assertAllSearchWorks(int count) throws IOException {
// the 'string' field has a boost of 4 in the mappings so it should get a payload boost
String stringValue = (String) XContentMapValues.extractValue("_source.string", bestHit);
assertNotNull(stringValue);
- String type = (String) bestHit.get("_type");
String id = (String) bestHit.get("_id");
- Request explainRequest = new Request("GET", "/" + index + "/" + type + "/" + id + "/_explain");
+ Request explainRequest = new Request("GET", "/" + index + "/_explain" + "/" + id);
explainRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}");
- explainRequest.setOptions(expectWarnings(RestExplainAction.TYPES_DEPRECATION_MESSAGE));
String explanation = toStr(client().performRequest(explainRequest));
assertFalse("Could not find payload boost in explanation\n" + explanation, explanation.contains("payloadBoost"));
@@ -614,7 +583,7 @@ void assertBasicAggregationWorks() throws IOException {
assertTotalHits(termsCount, boolTerms);
}
- void assertRealtimeGetWorks(final String typeName) throws IOException {
+ void assertRealtimeGetWorks() throws IOException {
Request disableAutoRefresh = new Request("PUT", "/" + index + "/_settings");
disableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : -1 }}");
client().performRequest(disableAutoRefresh);
@@ -625,12 +594,11 @@ void assertRealtimeGetWorks(final String typeName) throws IOException {
Map<?, ?> hit = (Map<?, ?>) ((List<?>)(XContentMapValues.extractValue("hits.hits", searchResponse))).get(0);
String docId = (String) hit.get("_id");
- Request updateRequest = new Request("POST", "/" + index + "/" + typeName + "/" + docId + "/_update");
- updateRequest.setOptions(expectWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE));
+ Request updateRequest = new Request("POST", "/" + index + "/_update/" + docId);
updateRequest.setJsonEntity("{ \"doc\" : { \"foo\": \"bar\"}}");
client().performRequest(updateRequest);
- Request getRequest = new Request("GET", "/" + index + "/" + typeName + "/" + docId);
+ Request getRequest = new Request("GET", "/" + index + "/" + type + "/" + docId);
Map<String, Object> getRsp = entityAsMap(client().performRequest(getRequest));
Map<?, ?> source = (Map<?, ?>) getRsp.get("_source");
assertTrue("doc does not contain 'foo' key: " + source, source.containsKey("foo"));
@@ -696,7 +664,6 @@ public void testEmptyShard() throws IOException {
// before timing out
.put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")
.put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster
- settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
if (randomBoolean()) {
settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1");
}
@@ -719,8 +686,13 @@ public void testRecovery() throws Exception {
* an index without a translog so we randomize whether
* or not we have one. */
shouldHaveTranslog = randomBoolean();
-
- indexRandomDocuments(count, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
+ Settings.Builder settings = Settings.builder();
+ if (minimumNodeVersion().before(Version.V_2_0_0) && randomBoolean()) {
+ settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ }
+ final String mappings = randomBoolean() ? "\"_source\": { \"enabled\": false}" : null;
+ createIndex(index, settings.build(), mappings);
+ indexRandomDocuments(count, true, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
// make sure all recoveries are done
ensureGreen(index);
@@ -731,28 +703,26 @@ public void testRecovery() throws Exception {
flushRequest.addParameter("wait_if_ongoing", "true");
assertOK(client().performRequest(flushRequest));
- if (randomBoolean()) {
- performSyncedFlush(index, randomBoolean());
- }
if (shouldHaveTranslog) {
// Update a few documents so we are sure to have a translog
indexRandomDocuments(
- count / 10,
- false, // flushing here would invalidate the whole thing
- false,
- i -> jsonBuilder().startObject().field("field", "value").endObject()
+ count / 10,
+ false, // flushing here would invalidate the whole thing
+ false,
+ true,
+ i -> jsonBuilder().startObject().field("field", "value").endObject()
);
}
- saveInfoDocument("should_have_translog", Boolean.toString(shouldHaveTranslog));
+ saveInfoDocument(index + "_should_have_translog", Boolean.toString(shouldHaveTranslog));
} else {
count = countOfIndexedRandomDocuments();
- shouldHaveTranslog = Booleans.parseBoolean(loadInfoDocument("should_have_translog"));
+ shouldHaveTranslog = Booleans.parseBoolean(loadInfoDocument(index + "_should_have_translog"));
}
// Count the documents in the index to make sure we have as many as we put there
Request countRequest = new Request("GET", "/" + index + "/_search");
countRequest.addParameter("size", "0");
- refresh();
+ refreshAllIndices();
Map<String, Object> countResponse = entityAsMap(client().performRequest(countRequest));
assertTotalHits(count, countResponse);
@@ -785,6 +755,7 @@ public void testRecovery() throws Exception {
String currentLuceneVersion = Version.CURRENT.luceneVersion.toString();
String bwcLuceneVersion = getOldClusterVersion().luceneVersion.toString();
+ String minCompatibleBWCVersion = Version.CURRENT.minimumCompatibilityVersion().luceneVersion.toString();
if (shouldHaveTranslog && false == currentLuceneVersion.equals(bwcLuceneVersion)) {
int numCurrentVersion = 0;
int numBwcVersion = 0;
@@ -803,6 +774,10 @@ public void testRecovery() throws Exception {
numCurrentVersion++;
} else if (bwcLuceneVersion.equals(version)) {
numBwcVersion++;
+ } else if (minCompatibleBWCVersion.equals(version) && minCompatibleBWCVersion.equals(bwcLuceneVersion) == false) {
+ // Our upgrade path from 7.non-last always goes through 7.last, which depending on timing can create 7.last
+ // index segment. We ignore those.
+ continue;
} else {
fail("expected version to be one of [" + currentLuceneVersion + "," + bwcLuceneVersion + "] but was " + line);
}
@@ -828,7 +803,12 @@ public void testSnapshotRestore() throws IOException {
if (isRunningAgainstOldCluster()) {
// Create the index
count = between(200, 300);
- indexRandomDocuments(count, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
+ Settings.Builder settings = Settings.builder();
+ if (minimumNodeVersion().before(Version.V_2_0_0) && randomBoolean()) {
+ settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ }
+ createIndex(index, settings.build());
+ indexRandomDocuments(count, true, true, randomBoolean(), i -> jsonBuilder().startObject().field("field", "value").endObject());
} else {
count = countOfIndexedRandomDocuments();
}
@@ -856,9 +836,6 @@ public void testSnapshotRestore() throws IOException {
}
templateBuilder.endObject();
templateBuilder.startObject("mappings"); {
- if (isRunningAgainstAncientCluster()) {
- templateBuilder.startObject(type);
- }
{
templateBuilder.startObject("_source");
{
@@ -866,9 +843,6 @@ public void testSnapshotRestore() throws IOException {
}
templateBuilder.endObject();
}
- if (isRunningAgainstAncientCluster()) {
- templateBuilder.endObject();
- }
}
templateBuilder.endObject();
templateBuilder.startObject("aliases"); {
@@ -887,7 +861,6 @@ public void testSnapshotRestore() throws IOException {
templateBuilder.endObject().endObject();
Request createTemplateRequest = new Request("PUT", "/_template/test_template");
createTemplateRequest.setJsonEntity(Strings.toString(templateBuilder));
- createTemplateRequest.setOptions(allowTypesRemovalWarnings());
client().performRequest(createTemplateRequest);
@@ -978,13 +951,10 @@ public void testSoftDeletes() throws Exception {
int numDocs = between(10, 100);
for (int i = 0; i < numDocs; i++) {
String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v1").endObject());
- Request request = new Request("POST", "/" + index + "/" + type + "/" + i);
- if (isRunningAgainstAncientCluster() == false) {
- request.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
- }
+ Request request = new Request("POST", "/" + index + "/_doc/" + i);
request.setJsonEntity(doc);
client().performRequest(request);
- refresh();
+ refreshAllIndices();
}
client().performRequest(new Request("POST", "/" + index + "/_flush"));
int liveDocs = numDocs;
@@ -992,19 +962,19 @@ public void testSoftDeletes() throws Exception {
for (int i = 0; i < numDocs; i++) {
if (randomBoolean()) {
String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v2").endObject());
- Request request = new Request("POST", "/" + index + "/" + type + "/" + i);
+ Request request = new Request("POST", "/" + index + "/_doc/" + i);
request.setJsonEntity(doc);
client().performRequest(request);
} else if (randomBoolean()) {
- client().performRequest(new Request("DELETE", "/" + index + "/" + type + "/" + i));
+ client().performRequest(new Request("DELETE", "/" + index + "/_doc/" + i));
liveDocs--;
}
}
- refresh();
+ refreshAllIndices();
assertTotalHits(liveDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))));
- saveInfoDocument("doc_count", Integer.toString(liveDocs));
+ saveInfoDocument(index + "_doc_count", Integer.toString(liveDocs));
} else {
- int liveDocs = Integer.parseInt(loadInfoDocument("doc_count"));
+ int liveDocs = Integer.parseInt(loadInfoDocument(index + "_doc_count"));
assertTotalHits(liveDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))));
}
}
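The test updates above consistently replace typed endpoints with the typeless document APIs. A minimal sketch of that pattern inside a test extending OpenSearchRestTestCase; the index name and payloads are placeholders:

import java.io.IOException;

import org.opensearch.client.Request;
import org.opensearch.test.rest.OpenSearchRestTestCase;

public class TypelessEndpointsSketchIT extends OpenSearchRestTestCase {
    public void testTypelessDocumentEndpoints() throws IOException {
        // Index a document without a mapping type in the path.
        final Request indexRequest = new Request("POST", "/my-index/_doc/1");
        indexRequest.setJsonEntity("{\"field\":\"v1\"}");
        client().performRequest(indexRequest);

        // Partial update through the typeless _update endpoint.
        final Request updateRequest = new Request("POST", "/my-index/_update/1");
        updateRequest.setJsonEntity("{\"doc\":{\"foo\":\"bar\"}}");
        client().performRequest(updateRequest);

        // Delete by id, again without a type segment.
        client().performRequest(new Request("DELETE", "/my-index/_doc/1"));
    }
}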
@@ -1139,10 +1109,9 @@ private void checkSnapshot(final String snapshotName, final int count, final Ver
bulk.append("{\"index\":{\"_id\":\"").append(count + i).append("\"}}\n");
bulk.append("{\"test\":\"test\"}\n");
}
- Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/" + type + "/_bulk");
+ Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/_bulk");
writeToRestoredRequest.addParameter("refresh", "true");
writeToRestoredRequest.setJsonEntity(bulk.toString());
- writeToRestoredRequest.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
assertThat(EntityUtils.toString(client().performRequest(writeToRestoredRequest).getEntity()), containsString("\"errors\":false"));
// And count to make sure the add worked
@@ -1150,7 +1119,7 @@ private void checkSnapshot(final String snapshotName, final int count, final Ver
Request countAfterWriteRequest = new Request("GET", "/restored_" + index + "/_search");
countAfterWriteRequest.addParameter("size", "0");
Map<String, Object> countAfterResponse = entityAsMap(client().performRequest(countRequest));
- assertTotalHits(count+extras, countAfterResponse);
+ assertTotalHits(count + extras, countAfterResponse);
// Clean up the index for the next iteration
client().performRequest(new Request("DELETE", "/restored_*"));
@@ -1160,24 +1129,17 @@ private void checkSnapshot(final String snapshotName, final int count, final Ver
clusterSettingsRequest.addParameter("flat_settings", "true");
Map<String, Object> clusterSettingsResponse = entityAsMap(client().performRequest(clusterSettingsRequest));
@SuppressWarnings("unchecked") final Map<String, Object> persistentSettings =
-            (Map<String, Object>)clusterSettingsResponse.get("persistent");
+            (Map<String, Object>) clusterSettingsResponse.get("persistent");
assertThat(persistentSettings.get("cluster.routing.allocation.exclude.test_attr"), equalTo(getOldClusterVersion().toString()));
// Check that the template was restored successfully
Request getTemplateRequest = new Request("GET", "/_template/test_template");
- getTemplateRequest.setOptions(allowTypesRemovalWarnings());
Map<String, Object> getTemplateResponse = entityAsMap(client().performRequest(getTemplateRequest));
Map<String, Object> expectedTemplate = new HashMap<>();
expectedTemplate.put("index_patterns", singletonList("evil_*"));
expectedTemplate.put("settings", singletonMap("index", singletonMap("number_of_shards", "1")));
- // We don't have the type in the response starting with 7.0, but we won't have it on old cluster after upgrade
- // either so look at the response to figure out the correct assertions
- if (isTypeInTemplateResponse(getTemplateResponse)) {
- expectedTemplate.put("mappings", singletonMap(type, singletonMap("_source", singletonMap("enabled", true))));
- } else {
- expectedTemplate.put("mappings", singletonMap("_source", singletonMap("enabled", true)));
- }
+ expectedTemplate.put("mappings", singletonMap("_source", singletonMap("enabled", true)));
expectedTemplate.put("order", 0);
Map<String, Object> aliases = new HashMap<>();
@@ -1193,32 +1155,23 @@ private void checkSnapshot(final String snapshotName, final int count, final Ver
}
}
- @SuppressWarnings("unchecked")
-    private boolean isTypeInTemplateResponse(Map<String, Object> getTemplateResponse) {
-        return ( (Map<String, Object>) (
-            (Map<String, Object>) getTemplateResponse.getOrDefault("test_template", emptyMap())
-        ).get("mappings")).get("_source") == null;
- }
-
// TODO tests for upgrades after shrink. We've had trouble with shrink in the past.
private void indexRandomDocuments(
- final int count,
- final boolean flushAllowed,
- final boolean saveInfo,
-        final CheckedFunction<Integer, XContentBuilder, IOException> docSupplier)
- throws IOException {
+ final int count,
+ final boolean flushAllowed,
+ final boolean saveInfo,
+ final boolean specifyId,
+        final CheckedFunction<Integer, XContentBuilder, IOException> docSupplier
+ ) throws IOException {
logger.info("Indexing {} random documents", count);
for (int i = 0; i < count; i++) {
logger.debug("Indexing document [{}]", i);
- Request createDocument = new Request("POST", "/" + index + "/" + type + "/" + i);
- if (isRunningAgainstAncientCluster() == false) {
- createDocument.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
- }
+ Request createDocument = new Request("POST", "/" + index + "/_doc/" + (specifyId ? i : ""));
createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i)));
client().performRequest(createDocument);
if (rarely()) {
- refresh();
+ refreshAllIndices();
}
if (flushAllowed && rarely()) {
logger.debug("Flushing [{}]", index);
@@ -1226,7 +1179,7 @@ private void indexRandomDocuments(
}
}
if (saveInfo) {
- saveInfoDocument("count", Integer.toString(count));
+ saveInfoDocument(index + "_count", Integer.toString(count));
}
}
@@ -1237,25 +1190,22 @@ private void indexDocument(String id) throws IOException {
}
private int countOfIndexedRandomDocuments() throws IOException {
- return Integer.parseInt(loadInfoDocument("count"));
+ return Integer.parseInt(loadInfoDocument(index + "_count"));
}
- private void saveInfoDocument(String type, String value) throws IOException {
+ private void saveInfoDocument(String id, String value) throws IOException {
XContentBuilder infoDoc = JsonXContent.contentBuilder().startObject();
infoDoc.field("value", value);
infoDoc.endObject();
// Only create the first version so we know how many documents are created when the index is first created
- Request request = new Request("PUT", "/info/" + this.type + "/" + index + "_" + type);
+ Request request = new Request("PUT", "/info/" + type + "/" + id);
request.addParameter("op_type", "create");
request.setJsonEntity(Strings.toString(infoDoc));
- if (isRunningAgainstAncientCluster() == false) {
- request.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
- }
client().performRequest(request);
}
- private String loadInfoDocument(String type) throws IOException {
- Request request = new Request("GET", "/info/" + this.type + "/" + index + "_" + type);
+ private String loadInfoDocument(String id) throws IOException {
+ Request request = new Request("GET", "/info/_doc/" + id);
request.addParameter("filter_path", "_source");
String doc = toStr(client().performRequest(request));
Matcher m = Pattern.compile("\"value\":\"(.+)\"").matcher(doc);
@@ -1263,10 +1213,6 @@ private String loadInfoDocument(String type) throws IOException {
return m.group(1);
}
- private Object randomLenientBoolean() {
- return randomFrom(new Object[] {"off", "no", "0", 0, "false", false, "on", "yes", "1", 1, "true", true});
- }
-
private void refresh() throws IOException {
logger.debug("Refreshing [{}]", index);
client().performRequest(new Request("POST", "/" + index + "/_refresh"));
@@ -1337,7 +1283,9 @@ public void testOperationBasedRecovery() throws Exception {
final Settings.Builder settings = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1);
- settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ if (minimumNodeVersion().before(Version.V_2_0_0)) {
+ settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ }
createIndex(index, settings.build());
ensureGreen(index);
int committedDocs = randomIntBetween(100, 200);
@@ -1392,7 +1340,9 @@ public void testRecoveryWithTranslogRetentionDisabled() throws Exception {
final Settings.Builder settings = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1);
- settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ if (minimumNodeVersion().before(Version.V_2_0_0)) {
+ settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ }
if (randomBoolean()) {
settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1");
}
@@ -1415,7 +1365,7 @@ public void testRecoveryWithTranslogRetentionDisabled() throws Exception {
if (randomBoolean()) {
flush(index, randomBoolean());
} else if (randomBoolean()) {
- performSyncedFlush(index, randomBoolean());
+ syncedFlush(index, randomBoolean());
}
saveInfoDocument("doc_count", Integer.toString(numDocs));
}
@@ -1424,6 +1374,72 @@ public void testRecoveryWithTranslogRetentionDisabled() throws Exception {
assertTotalHits(numDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))));
}
+ public void testResize() throws Exception {
+ int numDocs;
+ if (isRunningAgainstOldCluster()) {
+ final Settings.Builder settings = Settings.builder()
+ .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 3)
+ .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1);
+ if (minimumNodeVersion().before(Version.V_2_0_0) && randomBoolean()) {
+ settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false);
+ }
+ final String mappings = randomBoolean() ? "\"_source\": { \"enabled\": false}" : null;
+ createIndex(index, settings.build(), mappings);
+ numDocs = randomIntBetween(10, 1000);
+ for (int i = 0; i < numDocs; i++) {
+ indexDocument(Integer.toString(i));
+ if (rarely()) {
+ flush(index, randomBoolean());
+ }
+ }
+ saveInfoDocument("num_doc_" + index, Integer.toString(numDocs));
+ ensureGreen(index);
+ } else {
+ ensureGreen(index);
+ numDocs = Integer.parseInt(loadInfoDocument("num_doc_" + index));
+ int moreDocs = randomIntBetween(0, 100);
+ for (int i = 0; i < moreDocs; i++) {
+ indexDocument(Integer.toString(numDocs + i));
+ if (rarely()) {
+ flush(index, randomBoolean());
+ }
+ }
+ Request updateSettingsRequest = new Request("PUT", "/" + index + "/_settings");
+ updateSettingsRequest.setJsonEntity("{\"settings\": {\"index.blocks.write\": true}}");
+ client().performRequest(updateSettingsRequest);
+ {
+ final String target = index + "_shrunken";
+ Request shrinkRequest = new Request("PUT", "/" + index + "/_shrink/" + target);
+ Settings.Builder settings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1);
+ if (randomBoolean()) {
+ settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true);
+ }
+ shrinkRequest.setJsonEntity("{\"settings\":" + Strings.toString(settings.build()) + "}");
+ client().performRequest(shrinkRequest);
+ ensureGreenLongWait(target);
+ assertNumHits(target, numDocs + moreDocs, 1);
+ }
+ {
+ final String target = index + "_split";
+ Settings.Builder settings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 6);
+ if (randomBoolean()) {
+ settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true);
+ }
+ Request splitRequest = new Request("PUT", "/" + index + "/_split/" + target);
+ splitRequest.setJsonEntity("{\"settings\":" + Strings.toString(settings.build()) + "}");
+ client().performRequest(splitRequest);
+ ensureGreenLongWait(target);
+ assertNumHits(target, numDocs + moreDocs, 6);
+ }
+ {
+ final String target = index + "_cloned";
+ client().performRequest(new Request("PUT", "/" + index + "/_clone/" + target));
+ ensureGreenLongWait(target);
+ assertNumHits(target, numDocs + moreDocs, 3);
+ }
+ }
+ }
+
@SuppressWarnings("unchecked")
public void testSystemIndexMetadataIsUpgraded() throws Exception {
final String systemIndexWarning = "this request accesses system indices: [.tasks], but in a future major version, direct " +
@@ -1436,11 +1452,7 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception {
Request bulk = new Request("POST", "/_bulk");
bulk.addParameter("refresh", "true");
- bulk.setJsonEntity("{\"index\": {\"_index\": \"test_index_old\", \"_type\" : \"" + type + "\"}}\n" +
- "{\"f1\": \"v1\", \"f2\": \"v2\"}\n");
- if (isRunningAgainstAncientCluster() == false) {
- bulk.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
- }
+ bulk.setJsonEntity("{\"index\": {\"_index\": \"test_index_old\"}}\n" + "{\"f1\": \"v1\", \"f2\": \"v2\"}\n");
client().performRequest(bulk);
// start a async reindex job
@@ -1529,16 +1541,17 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception {
}
public void testEnableSoftDeletesOnRestore() throws Exception {
+ assumeTrue("soft deletes must be enabled on 2.0+", getOldClusterVersion().before(Version.V_2_0_0));
final String snapshot = "snapshot-" + index;
if (isRunningAgainstOldCluster()) {
final Settings.Builder settings = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1);
- settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false);
createIndex(index, settings.build());
ensureGreen(index);
int numDocs = randomIntBetween(0, 100);
- indexRandomDocuments(numDocs, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
+ indexRandomDocuments(numDocs, true, true, randomBoolean(), i -> jsonBuilder().startObject().field("field", "value").endObject());
// create repo
XContentBuilder repoConfig = JsonXContent.contentBuilder().startObject();
{
@@ -1592,7 +1605,7 @@ public void testForbidDisableSoftDeletesOnRestore() throws Exception {
createIndex(index, settings.build());
ensureGreen(index);
int numDocs = randomIntBetween(0, 100);
- indexRandomDocuments(numDocs, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
+ indexRandomDocuments(numDocs, true, true, randomBoolean(), i -> jsonBuilder().startObject().field("field", "value").endObject());
// create repo
XContentBuilder repoConfig = JsonXContent.contentBuilder().startObject();
{
diff --git a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java
index 8e21998b50525..b133a6462a525 100644
--- a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java
+++ b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java
@@ -35,25 +35,29 @@
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.client.Request;
+import org.opensearch.client.RequestOptions;
import org.opensearch.client.Response;
+import org.opensearch.client.ResponseException;
import org.opensearch.client.RestClient;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.json.JsonXContent;
+import org.opensearch.common.xcontent.support.XContentMapValues;
import org.opensearch.index.seqno.SeqNoStats;
-import org.opensearch.rest.action.document.RestGetAction;
-import org.opensearch.rest.action.document.RestIndexAction;
+import org.opensearch.rest.RestStatus;
import org.opensearch.test.rest.OpenSearchRestTestCase;
import org.opensearch.test.rest.yaml.ObjectPath;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
+import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
public class IndexingIT extends OpenSearchRestTestCase {
@@ -61,9 +65,8 @@ public class IndexingIT extends OpenSearchRestTestCase {
private int indexDocs(String index, final int idStart, final int numDocs) throws IOException {
for (int i = 0; i < numDocs; i++) {
final int id = idStart + i;
- Request request = new Request("PUT", index + "/doc/" + id);
+ Request request = new Request("PUT", index + "/_doc/" + id);
request.setJsonEntity("{\"test\": \"test_" + randomAlphaOfLength(2) + "\"}");
- request.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
assertOK(client().performRequest(request));
}
return numDocs;
@@ -295,6 +298,59 @@ public void testUpdateSnapshotStatus() throws Exception {
request.setJsonEntity("{\"indices\": \"" + index + "\"}");
}
+ public void testSyncedFlushTransition() throws Exception {
+ Nodes nodes = buildNodeAndVersions();
+ assumeTrue("bwc version is on 1.x or Legacy 7.x", nodes.getBWCVersion().before(Version.V_2_0_0));
+ assumeFalse("no new node found", nodes.getNewNodes().isEmpty());
+ assumeFalse("no bwc node found", nodes.getBWCNodes().isEmpty());
+ // Allocate shards to new nodes then verify synced flush requests processed by old nodes/new nodes
+ String newNodes = nodes.getNewNodes().stream().map(Node::getNodeName).collect(Collectors.joining(","));
+ int numShards = randomIntBetween(1, 10);
+ int numOfReplicas = randomIntBetween(0, nodes.getNewNodes().size() - 1);
+ int totalShards = numShards * (numOfReplicas + 1);
+ final String index = "test_synced_flush";
+ createIndex(index, Settings.builder()
+ .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), numShards)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numOfReplicas)
+ .put("index.routing.allocation.include._name", newNodes).build());
+ ensureGreen(index);
+ indexDocs(index, randomIntBetween(0, 100), between(1, 100));
+ try (RestClient oldNodeClient = buildClient(restClientSettings(),
+ nodes.getBWCNodes().stream().map(Node::getPublishAddress).toArray(HttpHost[]::new))) {
+ Request request = new Request("POST", index + "/_flush/synced");
+ assertBusy(() -> {
+ ResponseException responseException = expectThrows(ResponseException.class, () -> oldNodeClient.performRequest(request));
+ assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.CONFLICT.getStatus()));
+ assertThat(responseException.getResponse().getWarnings(),
+ contains("Synced flush is deprecated and will be removed in 8.0. Use flush at _/flush or /{index}/_flush instead."));
+ Map<String, Object> result = ObjectPath.createFromResponse(responseException.getResponse()).evaluate("_shards");
+ assertThat(result.get("total"), equalTo(totalShards));
+ assertThat(result.get("successful"), equalTo(0));
+ assertThat(result.get("failed"), equalTo(totalShards));
+ });
+ Map<String, Object> stats = entityAsMap(client().performRequest(new Request("GET", index + "/_stats?level=shards")));
+ assertThat(XContentMapValues.extractValue("indices." + index + ".total.translog.uncommitted_operations", stats), equalTo(0));
+ }
+ indexDocs(index, randomIntBetween(0, 100), between(1, 100));
+ try (RestClient newNodeClient = buildClient(restClientSettings(),
+ nodes.getNewNodes().stream().map(Node::getPublishAddress).toArray(HttpHost[]::new))) {
+ Request request = new Request("POST", index + "/_flush/synced");
+ List<String> warningMsg = Arrays.asList("Synced flush was removed and a normal flush was performed instead. " +
+ "This transition will be removed in a future version.");
+ RequestOptions.Builder requestOptionsBuilder = RequestOptions.DEFAULT.toBuilder();
+ requestOptionsBuilder.setWarningsHandler(warnings -> warnings.equals(warningMsg) == false);
+ request.setOptions(requestOptionsBuilder);
+ assertBusy(() -> {
+ Map<String, Object> result = ObjectPath.createFromResponse(newNodeClient.performRequest(request)).evaluate("_shards");
+ assertThat(result.get("total"), equalTo(totalShards));
+ assertThat(result.get("successful"), equalTo(totalShards));
+ assertThat(result.get("failed"), equalTo(0));
+ });
+ Map<String, Object> stats = entityAsMap(client().performRequest(new Request("GET", index + "/_stats?level=shards")));
+ assertThat(XContentMapValues.extractValue("indices." + index + ".total.translog.uncommitted_operations", stats), equalTo(0));
+ }
+ }
+
private void assertCount(final String index, final String preference, final int expectedCount) throws IOException {
Request request = new Request("GET", index + "/_count");
request.addParameter("preference", preference);
@@ -305,9 +361,8 @@ private void assertCount(final String index, final String preference, final int
}
private void assertVersion(final String index, final int docId, final String preference, final int expectedVersion) throws IOException {
- Request request = new Request("GET", index + "/doc/" + docId);
+ Request request = new Request("GET", index + "/_doc/" + docId);
request.addParameter("preference", preference);
- request.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
final Response response = client().performRequest(request);
assertOK(response);
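Taken together, the IndexingIT changes above show the client-side migration path away from synced flush: on 2.0+ nodes a POST to /{index}/_flush/synced is handled as a normal flush and only emits a transition warning, while pre-2.0 nodes still serve the deprecated endpoint. The snippet below is a minimal illustrative sketch, not part of this patch, of how a caller using the low-level RestClient could issue the replacement plain flush and tolerate warnings during a mixed-version upgrade; the FlushHelper and flushIndex names and the permissive warnings handling are assumptions for illustration only.

import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.Response;
import org.opensearch.client.RestClient;
import org.opensearch.client.WarningsHandler;

import java.io.IOException;

final class FlushHelper {
    // A plain flush replaces the removed synced flush; warnings emitted by older nodes are ignored.
    static Response flushIndex(RestClient client, String index) throws IOException {
        Request request = new Request("POST", "/" + index + "/_flush");
        RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
        options.setWarningsHandler(WarningsHandler.PERMISSIVE);
        request.setOptions(options);
        return client.performRequest(request);
    }
}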
diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml
index 35d9c02e7e362..f8a31c5ec9214 100644
--- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml
+++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml
@@ -96,10 +96,6 @@
---
"skip_unavailable is returned as part of _remote/info response":
- - skip:
- version: " - 6.0.99"
- reason: "skip_unavailable is only returned from 6.1.0 on"
-
- do:
cluster.get_settings:
include_defaults: true
diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/60_tophits.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/60_tophits.yml
index 9d94e7d5abb3f..cc75ce692e6bf 100644
--- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/60_tophits.yml
+++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/60_tophits.yml
@@ -24,7 +24,7 @@ teardown:
bulk:
refresh: true
body:
- - '{"index": {"_index": "single_doc_index", "_type": "test_type"}}'
+ - '{"index": {"_index": "single_doc_index"}}'
- '{"f1": "local_cluster", "sort_field": 0}'
- do:
search:
diff --git a/qa/os/src/test/java/org/opensearch/packaging/test/ArchiveTests.java b/qa/os/src/test/java/org/opensearch/packaging/test/ArchiveTests.java
index e5464e8ee8d28..898ea12b6a6c3 100644
--- a/qa/os/src/test/java/org/opensearch/packaging/test/ArchiveTests.java
+++ b/qa/os/src/test/java/org/opensearch/packaging/test/ArchiveTests.java
@@ -85,6 +85,7 @@ public void test20PluginsListWithNoPlugins() throws Exception {
public void test30MissingBundledJdk() throws Exception {
final Installation.Executables bin = installation.executables();
sh.getEnv().remove("JAVA_HOME");
+ sh.getEnv().remove("OPENSEARCH_JAVA_HOME");
final Path relocatedJdk = installation.bundledJdk.getParent().resolve("jdk.relocated");
@@ -105,6 +106,7 @@ public void test30MissingBundledJdk() throws Exception {
public void test31BadJavaHome() throws Exception {
final Installation.Executables bin = installation.executables();
+ sh.getEnv().remove("OPENSEARCH_JAVA_HOME");
sh.getEnv().put("JAVA_HOME", "doesnotexist");
// ask for opensearch version to quickly exit if java is actually found (ie test failure)
@@ -114,11 +116,23 @@ public void test31BadJavaHome() throws Exception {
}
+ public void test31BadOpensearchJavaHome() throws Exception {
+ final Installation.Executables bin = installation.executables();
+ sh.getEnv().put("OPENSEARCH_JAVA_HOME", "doesnotexist");
+
+ // ask for opensearch version to quickly exit if java is actually found (ie test failure)
+ final Result runResult = sh.runIgnoreExitCode(bin.opensearch.toString() + " -V");
+ assertThat(runResult.exitCode, is(1));
+ assertThat(runResult.stderr, containsString("could not find java in OPENSEARCH_JAVA_HOME"));
+
+ }
+
public void test32SpecialCharactersInJdkPath() throws Exception {
final Installation.Executables bin = installation.executables();
assumeTrue("Only run this test when we know where the JDK is.", distribution().hasJdk);
final Path relocatedJdk = installation.bundledJdk.getParent().resolve("a (special) path");
+ sh.getEnv().remove("OPENSEARCH_JAVA_HOME");
sh.getEnv().put("JAVA_HOME", relocatedJdk.toString());
try {
@@ -154,6 +168,8 @@ public void test50StartAndStop() throws Exception {
}
public void test51JavaHomeOverride() throws Exception {
+ sh.getEnv().remove("OPENSEARCH_JAVA_HOME");
+
Platforms.onLinux(() -> {
String systemJavaHome1 = sh.run("echo $SYSTEM_JAVA_HOME").stdout.trim();
sh.getEnv().put("JAVA_HOME", systemJavaHome1);
@@ -171,8 +187,29 @@ public void test51JavaHomeOverride() throws Exception {
assertThat(FileUtils.slurpAllLogs(installation.logs, "opensearch.log", "*.log.gz"), containsString(systemJavaHome1));
}
- public void test52BundledJdkRemoved() throws Exception {
+ public void test51OpensearchJavaHomeOverride() throws Exception {
+ Platforms.onLinux(() -> {
+ String systemJavaHome1 = sh.run("echo $SYSTEM_JAVA_HOME").stdout.trim();
+ sh.getEnv().put("OPENSEARCH_JAVA_HOME", systemJavaHome1);
+ sh.getEnv().put("JAVA_HOME", "doesnotexist");
+ });
+ Platforms.onWindows(() -> {
+ final String systemJavaHome1 = sh.run("$Env:SYSTEM_JAVA_HOME").stdout.trim();
+ sh.getEnv().put("OPENSEARCH_JAVA_HOME", systemJavaHome1);
+ sh.getEnv().put("JAVA_HOME", "doesnotexist");
+ });
+
+ startOpenSearch();
+ ServerUtils.runOpenSearchTests();
+ stopOpenSearch();
+
+ String systemJavaHome1 = sh.getEnv().get("OPENSEARCH_JAVA_HOME");
+ assertThat(FileUtils.slurpAllLogs(installation.logs, "opensearch.log", "*.log.gz"), containsString(systemJavaHome1));
+ }
+
+ public void test52JavaHomeBundledJdkRemoved() throws Exception {
assumeThat(distribution().hasJdk, is(true));
+ sh.getEnv().remove("OPENSEARCH_JAVA_HOME");
Path relocatedJdk = installation.bundledJdk.getParent().resolve("jdk.relocated");
try {
@@ -197,7 +234,37 @@ public void test52BundledJdkRemoved() throws Exception {
}
}
+ public void test52OpensearchJavaHomeBundledJdkRemoved() throws Exception {
+ assumeThat(distribution().hasJdk, is(true));
+
+ Path relocatedJdk = installation.bundledJdk.getParent().resolve("jdk.relocated");
+ try {
+ mv(installation.bundledJdk, relocatedJdk);
+ Platforms.onLinux(() -> {
+ String systemJavaHome1 = sh.run("echo $SYSTEM_JAVA_HOME").stdout.trim();
+ sh.getEnv().put("OPENSEARCH_JAVA_HOME", systemJavaHome1);
+ sh.getEnv().put("JAVA_HOME", "doesnotexist");
+ });
+ Platforms.onWindows(() -> {
+ final String systemJavaHome1 = sh.run("$Env:SYSTEM_JAVA_HOME").stdout.trim();
+ sh.getEnv().put("OPENSEARCH_JAVA_HOME", systemJavaHome1);
+ sh.getEnv().put("JAVA_HOME", "doesnotexist");
+ });
+
+ startOpenSearch();
+ ServerUtils.runOpenSearchTests();
+ stopOpenSearch();
+
+ String systemJavaHome1 = sh.getEnv().get("OPENSEARCH_JAVA_HOME");
+ assertThat(FileUtils.slurpAllLogs(installation.logs, "opensearch.log", "*.log.gz"), containsString(systemJavaHome1));
+ } finally {
+ mv(relocatedJdk, installation.bundledJdk);
+ }
+ }
+
public void test53JavaHomeWithSpecialCharacters() throws Exception {
+ sh.getEnv().remove("OPENSEARCH_JAVA_HOME");
+
Platforms.onWindows(() -> {
String javaPath = "C:\\Program Files (x86)\\java";
try {
@@ -250,6 +317,7 @@ public void test54ForceBundledJdkEmptyJavaHome() throws Exception {
// cleanup from previous test
rm(installation.config("opensearch.keystore"));
+ sh.getEnv().put("OPENSEARCH_JAVA_HOME", "");
sh.getEnv().put("JAVA_HOME", "");
startOpenSearch();
diff --git a/qa/os/src/test/java/org/opensearch/packaging/test/PackagingTestCase.java b/qa/os/src/test/java/org/opensearch/packaging/test/PackagingTestCase.java
index 3294ffb3bbe45..259ae6e766c8e 100644
--- a/qa/os/src/test/java/org/opensearch/packaging/test/PackagingTestCase.java
+++ b/qa/os/src/test/java/org/opensearch/packaging/test/PackagingTestCase.java
@@ -33,6 +33,7 @@
package org.opensearch.packaging.test;
import com.carrotsearch.randomizedtesting.JUnit3MethodProvider;
+import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.RandomizedRunner;
import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering;
import com.carrotsearch.randomizedtesting.annotations.TestGroup;
@@ -182,11 +183,19 @@ public void setup() throws Exception {
sh.reset();
if (distribution().hasJdk == false) {
- Platforms.onLinux(() -> sh.getEnv().put("JAVA_HOME", systemJavaHome));
- Platforms.onWindows(() -> sh.getEnv().put("JAVA_HOME", systemJavaHome));
+ // Randomly switch between JAVA_HOME and OPENSEARCH_JAVA_HOME
+ final String javaHomeEnv = randomBoolean() ? "JAVA_HOME" : "OPENSEARCH_JAVA_HOME";
+ logger.info("Using " + javaHomeEnv);
+
+ Platforms.onLinux(() -> sh.getEnv().put(javaHomeEnv, systemJavaHome));
+ Platforms.onWindows(() -> sh.getEnv().put(javaHomeEnv, systemJavaHome));
}
}
+ private boolean randomBoolean() {
+ return RandomizedContext.current().getRandom().nextBoolean();
+ }
+
@After
public void teardown() throws Exception {
if (installation != null && failed == false) {
diff --git a/qa/os/src/test/java/org/opensearch/packaging/test/WindowsServiceTests.java b/qa/os/src/test/java/org/opensearch/packaging/test/WindowsServiceTests.java
index 50540f3ac5233..57ea853e735a9 100644
--- a/qa/os/src/test/java/org/opensearch/packaging/test/WindowsServiceTests.java
+++ b/qa/os/src/test/java/org/opensearch/packaging/test/WindowsServiceTests.java
@@ -149,12 +149,20 @@ public void test13InstallMissingBundledJdk() throws IOException {
}
public void test14InstallBadJavaHome() throws IOException {
+ sh.getEnv().put("OPENSEARCH_JAVA_HOME", "");
sh.getEnv().put("JAVA_HOME", "doesnotexist");
Result result = sh.runIgnoreExitCode(serviceScript + " install");
assertThat(result.exitCode, equalTo(1));
assertThat(result.stderr, containsString("could not find java in JAVA_HOME"));
}
+ public void test14InstallBadOpensearchJavaHome() throws IOException {
+ sh.getEnv().put("OPENSEARCH_JAVA_HOME", "doesnotexist");
+ Result result = sh.runIgnoreExitCode(serviceScript + " install");
+ assertThat(result.exitCode, equalTo(1));
+ assertThat(result.stderr, containsString("could not find java in OPENSEARCH_JAVA_HOME"));
+ }
+
public void test15RemoveNotInstalled() {
Result result = assertFailure(serviceScript + " remove", 1);
assertThat(result.stdout, containsString("Failed removing '" + DEFAULT_ID + "' service"));
@@ -163,6 +171,7 @@ public void test15RemoveNotInstalled() {
public void test16InstallSpecialCharactersInJdkPath() throws IOException {
assumeTrue("Only run this test when we know where the JDK is.", distribution().hasJdk);
final Path relocatedJdk = installation.bundledJdk.getParent().resolve("a (special) jdk");
+ sh.getEnv().put("OPENSEARCH_JAVA_HOME", "");
sh.getEnv().put("JAVA_HOME", relocatedJdk.toString());
try {
@@ -248,6 +257,7 @@ public void test32StopNotStarted() throws IOException {
public void test33JavaChanged() throws Exception {
final Path relocatedJdk = installation.bundledJdk.getParent().resolve("jdk.relocated");
+ sh.getEnv().put("OPENSEARCH_JAVA_HOME", "");
try {
mv(installation.bundledJdk, relocatedJdk);
@@ -261,6 +271,22 @@ public void test33JavaChanged() throws Exception {
}
}
+ public void test33OpensearchJavaChanged() throws Exception {
+ final Path relocatedJdk = installation.bundledJdk.getParent().resolve("jdk.relocated");
+ sh.getEnv().put("JAVA_HOME", "");
+
+ try {
+ mv(installation.bundledJdk, relocatedJdk);
+ sh.getEnv().put("OPENSEARCH_JAVA_HOME", relocatedJdk.toString());
+ assertCommand(serviceScript + " install");
+ sh.getEnv().remove("OPENSEARCH_JAVA_HOME");
+ assertCommand(serviceScript + " start");
+ assertStartedAndStop();
+ } finally {
+ mv(relocatedJdk, installation.bundledJdk);
+ }
+ }
+
public void test60Manager() throws IOException {
Path serviceMgr = installation.bin("opensearch-service-mgr.exe");
Path tmpServiceMgr = serviceMgr.getParent().resolve(serviceMgr.getFileName() + ".tmp");
diff --git a/qa/os/src/test/java/org/opensearch/packaging/util/Cleanup.java b/qa/os/src/test/java/org/opensearch/packaging/util/Cleanup.java
index 6bd7f07320350..d18c0d8d7cca1 100644
--- a/qa/os/src/test/java/org/opensearch/packaging/util/Cleanup.java
+++ b/qa/os/src/test/java/org/opensearch/packaging/util/Cleanup.java
@@ -75,16 +75,14 @@ public static void cleanEverything() throws Exception {
sh.runIgnoreExitCode("ps aux | grep -i 'org.opensearch.bootstrap.OpenSearch' | awk {'print $2'} | xargs kill -9");
});
- Platforms.onWindows(
- () -> {
- // the view of processes returned by Get-Process doesn't expose command line arguments, so we use WMI here
- sh.runIgnoreExitCode(
- "Get-WmiObject Win32_Process | "
- + "Where-Object { $_.CommandLine -Match 'org.opensearch.bootstrap.OpenSearch' } | "
- + "ForEach-Object { $_.Terminate() }"
- );
- }
- );
+ Platforms.onWindows(() -> {
+ // the view of processes returned by Get-Process doesn't expose command line arguments, so we use WMI here
+ sh.runIgnoreExitCode(
+ "Get-WmiObject Win32_Process | "
+ + "Where-Object { $_.CommandLine -Match 'org.opensearch.bootstrap.OpenSearch' } | "
+ + "ForEach-Object { $_.Terminate() }"
+ );
+ });
Platforms.onLinux(Cleanup::purgePackagesLinux);
diff --git a/qa/os/src/test/java/org/opensearch/packaging/util/ServerUtils.java b/qa/os/src/test/java/org/opensearch/packaging/util/ServerUtils.java
index dcc6829eb4143..d92feec21daaf 100644
--- a/qa/os/src/test/java/org/opensearch/packaging/util/ServerUtils.java
+++ b/qa/os/src/test/java/org/opensearch/packaging/util/ServerUtils.java
@@ -198,12 +198,12 @@ public static void waitForOpenSearch(String status, String index, Installation i
public static void runOpenSearchTests() throws Exception {
makeRequest(
- Request.Post("http://localhost:9200/library/book/1?refresh=true&pretty")
+ Request.Post("http://localhost:9200/library/_doc/1?refresh=true&pretty")
.bodyString("{ \"title\": \"Book #1\", \"pages\": 123 }", ContentType.APPLICATION_JSON)
);
makeRequest(
- Request.Post("http://localhost:9200/library/book/2?refresh=true&pretty")
+ Request.Post("http://localhost:9200/library/_doc/2?refresh=true&pretty")
.bodyString("{ \"title\": \"Book #2\", \"pages\": 456 }", ContentType.APPLICATION_JSON)
);
diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java
index 79745b1cc2f95..f34e5f7bc121a 100644
--- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java
+++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java
@@ -205,12 +205,11 @@ public void testAutoIdWithOpTypeCreate() throws IOException {
private void bulk(String index, String valueSuffix, int count) throws IOException {
StringBuilder b = new StringBuilder();
for (int i = 0; i < count; i++) {
- b.append("{\"index\": {\"_index\": \"").append(index).append("\", \"_type\": \"_doc\"}}\n");
+ b.append("{\"index\": {\"_index\": \"").append(index).append("\"}}\n");
b.append("{\"f1\": \"v").append(i).append(valueSuffix).append("\", \"f2\": ").append(i).append("}\n");
}
Request bulk = new Request("POST", "/_bulk");
bulk.addParameter("refresh", "true");
- bulk.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
bulk.setJsonEntity(b.toString());
client().performRequest(bulk);
}
diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java
index 5507a5a221473..687fd1743c3d3 100644
--- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java
+++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java
@@ -48,9 +48,6 @@
import org.opensearch.common.xcontent.support.XContentMapValues;
import org.opensearch.index.IndexSettings;
import org.opensearch.rest.RestStatus;
-import org.opensearch.rest.action.document.RestGetAction;
-import org.opensearch.rest.action.document.RestIndexAction;
-import org.opensearch.rest.action.document.RestUpdateAction;
import org.opensearch.test.rest.yaml.ObjectPath;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
@@ -67,7 +64,7 @@
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
-import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength;
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiLettersOfLength;
import static org.opensearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING;
import static org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING;
import static org.opensearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY;
@@ -123,9 +120,8 @@ public void testHistoryUUIDIsGenerated() throws Exception {
private int indexDocs(String index, final int idStart, final int numDocs) throws IOException {
for (int i = 0; i < numDocs; i++) {
final int id = idStart + i;
- Request indexDoc = new Request("PUT", index + "/test/" + id);
- indexDoc.setJsonEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}");
- indexDoc.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
+ Request indexDoc = new Request("PUT", index + "/_doc/" + id);
+ indexDoc.setJsonEntity("{\"test\": \"test_" + randomAsciiLettersOfLength(2) + "\"}");
client().performRequest(indexDoc);
}
return numDocs;
@@ -322,13 +318,13 @@ public void testRelocationWithConcurrentIndexing() throws Exception {
throw new IllegalStateException("unknown type " + CLUSTER_TYPE);
}
if (randomBoolean()) {
- performSyncedFlush(index, randomBoolean());
+ syncedFlush(index, randomBoolean());
ensureGlobalCheckpointSynced(index);
}
}
public void testRecovery() throws Exception {
- final String index = "recover_with_soft_deletes";
+ final String index = "test_recovery";
if (CLUSTER_TYPE == ClusterType.OLD) {
Settings.Builder settings = Settings.builder()
.put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1)
@@ -339,7 +335,7 @@ public void testRecovery() throws Exception {
// before timing out
.put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")
.put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster
- if (randomBoolean()) {
+ if (minimumNodeVersion().before(Version.V_2_0_0) && randomBoolean()) {
settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
}
createIndex(index, settings.build());
@@ -360,6 +356,9 @@ public void testRecovery() throws Exception {
}
}
}
+ if (randomBoolean()) {
+ syncedFlush(index, randomBoolean());
+ }
ensureGreen(index);
}
@@ -370,8 +369,10 @@ public void testRetentionLeasesEstablishedWhenPromotingPrimary() throws Exceptio
.put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), between(1, 5))
.put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), between(1, 2)) // triggers nontrivial promotion
.put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")
- .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0") // fail faster
- .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster
+ if (minimumNodeVersion().before(Version.V_2_0_0) && randomBoolean()) {
+ settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ }
createIndex(index, settings.build());
int numDocs = randomInt(10);
indexDocs(index, 0, numDocs);
@@ -391,8 +392,10 @@ public void testRetentionLeasesEstablishedWhenRelocatingPrimary() throws Excepti
.put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), between(1, 5))
.put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), between(0, 1))
.put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")
- .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0") // fail faster
- .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster
+ if (minimumNodeVersion().before(Version.V_2_0_0) && randomBoolean()) {
+ settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ }
createIndex(index, settings.build());
int numDocs = randomInt(10);
indexDocs(index, 0, numDocs);
@@ -652,8 +655,7 @@ public void testUpdateDoc() throws Exception {
final int times = randomIntBetween(0, 2);
for (int i = 0; i < times; i++) {
long value = randomNonNegativeLong();
- Request update = new Request("POST", index + "/test/" + docId + "/_update");
- update.setOptions(expectWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE));
+ Request update = new Request("POST", index + "/_update/" + docId);
update.setJsonEntity("{\"doc\": {\"updated_field\": " + value + "}}");
client().performRequest(update);
updates.put(docId, value);
@@ -661,13 +663,12 @@ public void testUpdateDoc() throws Exception {
}
client().performRequest(new Request("POST", index + "/_refresh"));
for (int docId : updates.keySet()) {
- Request get = new Request("GET", index + "/test/" + docId);
- get.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
+ Request get = new Request("GET", index + "/_doc/" + docId);
Map<String, Object> doc = entityAsMap(client().performRequest(get));
assertThat(XContentMapValues.extractValue("_source.updated_field", doc), equalTo(updates.get(docId)));
}
if (randomBoolean()) {
- performSyncedFlush(index, randomBoolean());
+ syncedFlush(index, randomBoolean());
ensureGlobalCheckpointSynced(index);
}
}
@@ -713,10 +714,13 @@ private void assertNoopRecoveries(String indexName, Predicate targetNode
public void testOperationBasedRecovery() throws Exception {
final String index = "test_operation_based_recovery";
if (CLUSTER_TYPE == ClusterType.OLD) {
- createIndex(index, Settings.builder()
+ final Settings.Builder settings = Settings.builder()
.put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1)
- .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 2)
- .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()).build());
+ .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 2);
+ if (minimumNodeVersion().before(Version.V_2_0_0) && randomBoolean()) {
+ settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
+ }
+ createIndex(index, settings.build());
ensureGreen(index);
indexDocs(index, 0, randomIntBetween(100, 200));
flush(index, randomBoolean());
@@ -791,7 +795,7 @@ public void testSoftDeletesDisabledWarning() throws Exception {
if (CLUSTER_TYPE == ClusterType.OLD) {
boolean softDeletesEnabled = true;
Settings.Builder settings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1);
- if (randomBoolean()) {
+ if (minimumNodeVersion().before(Version.V_2_0_0) && randomBoolean()) {
softDeletesEnabled = randomBoolean();
settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), softDeletesEnabled);
}
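The RecoveryIT changes above consistently move the test helpers from typed URLs (index/test/{id}, index/test/{id}/_update) to the typeless document endpoints. As a compact reference, the sketch below shows the same three calls through the low-level RestClient; it is an illustrative helper, not code from this patch, and the TypelessDocOps class and its method names are assumptions.

import org.opensearch.client.Request;
import org.opensearch.client.Response;
import org.opensearch.client.RestClient;

import java.io.IOException;

final class TypelessDocOps {
    // PUT /{index}/_doc/{id} indexes a document without a mapping type in the URL.
    static Response index(RestClient client, String index, String id, String jsonBody) throws IOException {
        Request request = new Request("PUT", "/" + index + "/_doc/" + id);
        request.setJsonEntity(jsonBody);
        return client.performRequest(request);
    }

    // POST /{index}/_update/{id} applies a partial update to an existing document.
    static Response update(RestClient client, String index, String id, String jsonDoc) throws IOException {
        Request request = new Request("POST", "/" + index + "/_update/" + id);
        request.setJsonEntity("{\"doc\": " + jsonDoc + "}");
        return client.performRequest(request);
    }

    // GET /{index}/_doc/{id} fetches the document back.
    static Response get(RestClient client, String index, String id) throws IOException {
        return client.performRequest(new Request("GET", "/" + index + "/_doc/" + id));
    }
}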
diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml
index 83df474d70d89..89992eeba616f 100644
--- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml
+++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml
@@ -80,9 +80,6 @@
---
"Create index with java style index in 6":
- - skip:
- version: " - 6.7.99, 7.0.0 -"
- reason: java.time patterns are allowed since 6.8
- do:
indices.create:
index: java_for_range
diff --git a/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml b/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml
index 5a3f64151f4ed..7a0cdcbef0786 100644
--- a/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml
+++ b/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml
@@ -54,7 +54,6 @@
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"foo": "bar"
@@ -74,7 +73,6 @@
catch: /There are no ingest nodes in this cluster, unable to forward request to an ingest node./
index:
index: test
- type: test
id: 1
pipeline: "my_pipeline_1"
body: {
@@ -92,12 +90,10 @@
body:
- index:
_index: test_index
- _type: test_type
_id: test_id
- f1: v1
- index:
_index: test_index
- _type: test_type
_id: test_id2
- f1: v2
@@ -109,12 +105,10 @@
body:
- index:
_index: test_index
- _type: test_type
_id: test_id
- f1: v1
- index:
_index: test_index
- _type: test_type
_id: test_id2
pipeline: my_pipeline_1
- f1: v2
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/IngestDocumentMustacheIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/IngestDocumentMustacheIT.java
index 5b2468b6304b1..83643f3217720 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/IngestDocumentMustacheIT.java
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/IngestDocumentMustacheIT.java
@@ -46,7 +46,7 @@ public class IngestDocumentMustacheIT extends AbstractScriptTestCase {
public void testAccessMetadataViaTemplate() {
Map<String, Object> document = new HashMap<>();
document.put("foo", "bar");
- IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document);
+ IngestDocument ingestDocument = new IngestDocument("index", "id", null, null, null, document);
ingestDocument.setFieldValue(compile("field1"), ValueSource.wrap("1 {{foo}}", scriptService));
assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 bar"));
@@ -61,7 +61,7 @@ public void testAccessMapMetadataViaTemplate() {
innerObject.put("baz", "hello baz");
innerObject.put("qux", Collections.singletonMap("fubar", "hello qux and fubar"));
document.put("foo", innerObject);
- IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document);
+ IngestDocument ingestDocument = new IngestDocument("index", "id", null, null, null, document);
ingestDocument.setFieldValue(compile("field1"),
ValueSource.wrap("1 {{foo.bar}} {{foo.baz}} {{foo.qux.fubar}}", scriptService));
assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 hello bar hello baz hello qux and fubar"));
@@ -80,7 +80,7 @@ public void testAccessListMetadataViaTemplate() {
list.add(value);
list.add(null);
document.put("list2", list);
- IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document);
+ IngestDocument ingestDocument = new IngestDocument("index", "id", null, null, null, document);
ingestDocument.setFieldValue(compile("field1"), ValueSource.wrap("1 {{list1.0}} {{list2.0}}", scriptService));
assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 foo {field=value}"));
}
@@ -90,7 +90,7 @@ public void testAccessIngestMetadataViaTemplate() {
Map<String, Object> ingestMap = new HashMap<>();
ingestMap.put("timestamp", "bogus_timestamp");
document.put("_ingest", ingestMap);
- IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document);
+ IngestDocument ingestDocument = new IngestDocument("index", "id", null, null, null, document);
ingestDocument.setFieldValue(compile("ingest_timestamp"),
ValueSource.wrap("{{_ingest.timestamp}} and {{_source._ingest.timestamp}}", scriptService));
assertThat(ingestDocument.getFieldValue("ingest_timestamp", String.class),
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/ValueSourceMustacheIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/ValueSourceMustacheIT.java
index 83641cca6156a..2804c73032f1b 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/ValueSourceMustacheIT.java
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/ValueSourceMustacheIT.java
@@ -77,7 +77,7 @@ public void testValueSourceWithTemplates() {
}
public void testAccessSourceViaTemplate() {
- IngestDocument ingestDocument = new IngestDocument("marvel", "type", "id", null, null, null, new HashMap<>());
+ IngestDocument ingestDocument = new IngestDocument("marvel", "id", null, null, null, new HashMap<>());
assertThat(ingestDocument.hasField("marvel"), is(false));
ingestDocument.setFieldValue(compile("{{_index}}"), ValueSource.wrap("{{_index}}", scriptService));
assertThat(ingestDocument.getFieldValue("marvel", String.class), equalTo("marvel"));
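The ingest test changes above reflect the IngestDocument constructor dropping the mapping-type argument, so metadata templates now expose only {{_index}} and {{_id}}. The following is a small hedged example of constructing such a document; the class name is illustrative and the parameter roles are inferred from the call sites in this patch rather than from Javadoc.

import org.opensearch.ingest.IngestDocument;

import java.util.HashMap;
import java.util.Map;

final class TypelessIngestDocumentExample {
    // Mirrors the constructor usage in the tests above: index, id, routing, version,
    // versionType, source - with no mapping type argument.
    static IngestDocument newDocument() {
        Map<String, Object> source = new HashMap<>();
        source.put("foo", "bar");
        return new IngestDocument("index", "id", null, null, null, source);
    }
}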
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml
index 5ffaebdcaef36..e6a2a3d52e116 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml
@@ -14,13 +14,13 @@
{
"set" : {
"field" : "index_type_id",
- "value": "{{_index}}/{{_type}}/{{_id}}"
+ "value": "{{_index}}/{{_id}}"
}
},
{
"append" : {
"field" : "metadata",
- "value": ["{{_index}}", "{{_type}}", "{{_id}}"]
+ "value": ["{{_index}}", "{{_id}}"]
}
}
]
@@ -30,7 +30,6 @@
- do:
index:
index: test
- type: test
id: 1
pipeline: "my_pipeline_1"
body: {}
@@ -38,11 +37,10 @@
- do:
get:
index: test
- type: test
id: 1
- length: { _source: 2 }
- - match: { _source.index_type_id: "test/test/1" }
- - match: { _source.metadata: ["test", "test", "1"] }
+ - match: { _source.index_type_id: "test/1" }
+ - match: { _source.metadata: ["test", "1"] }
---
"Test templating":
@@ -110,7 +108,6 @@
- do:
index:
index: test
- type: test
id: 1
pipeline: "my_pipeline_1"
body: {
@@ -123,7 +120,6 @@
- do:
get:
index: test
- type: test
id: 1
- length: { _source: 5 }
- match: { _source.field1: "1" }
@@ -135,7 +131,6 @@
- do:
index:
index: test
- type: test
id: 1
pipeline: "my_pipeline_2"
body: {
@@ -145,7 +140,6 @@
- do:
get:
index: test
- type: test
id: 1
- length: { _source: 2 }
- match: { _source.field1: "field2" }
@@ -154,7 +148,6 @@
- do:
index:
index: test
- type: test
id: 1
pipeline: "my_pipeline_3"
body: {
@@ -165,7 +158,6 @@
- do:
get:
index: test
- type: test
id: 1
- length: { _source: 1 }
- match: { _source.field_to_remove: "field2" }
@@ -204,7 +196,6 @@
- do:
index:
index: test
- type: test
id: 1
pipeline: "my_handled_pipeline"
body: {
@@ -214,7 +205,6 @@
- do:
get:
index: test
- type: test
id: 1
- length: { _source: 2 }
- match: { _source.do_nothing: "foo" }
@@ -246,7 +236,6 @@
- do:
index:
index: test
- type: test
id: 1
pipeline: "_id"
body: {
@@ -268,7 +257,6 @@
- do:
get:
index: test
- type: test
id: 1
- length: { _source: 2 }
- match: { _source.values_flat: ["foo_bar", "foo_baz"] }
@@ -307,7 +295,6 @@
"docs": [
{
"_index": "index",
- "_type": "type",
"_id": "id",
"_source": {
"foo": "bar"
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml
index c121d542c86b1..27f7f804ead1c 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml
@@ -1,10 +1,5 @@
---
"Test with date processor":
- - skip:
- version: " - 6.9.99"
- reason: pre-7.0.0 requires the 8 prefix for Java time formats, so would treat the format in this test as a Joda time format
- features: "warnings"
-
- do:
ingest.put_pipeline:
id: "_id"
@@ -48,7 +43,6 @@
- do:
index:
index: test
- type: test
id: 1
pipeline: "_id"
body: {
@@ -58,7 +52,6 @@
- do:
get:
index: test
- type: test
id: 1
- length: { _source: 13 }
- match: { _source.request: "/presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png" }
@@ -137,7 +130,6 @@
- do:
index:
index: test
- type: test
id: 1
pipeline: "_id"
body: {
@@ -178,7 +170,6 @@
- do:
get:
index: test
- type: test
id: 1
- length: { _source: 11 }
- is_false: _source.friends.0.id
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml
index 18929c47a4027..5eedae174eaa9 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml
@@ -18,7 +18,6 @@
- do:
index:
index: twitter
- type: _doc
id: 1
body: { "user": "foobar" }
- do:
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml
index e34ca43a62282..eb1c0d2eef7c5 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml
@@ -18,7 +18,6 @@
- do:
index:
index: twitter
- type: tweet
id: 1
body: { "user": "foobar" }
- do:
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml
index cce388c2ff737..eaf6b24030a06 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml
@@ -22,7 +22,6 @@
- do:
index:
index: test
- type: test
id: 1
pipeline: "my_pipeline"
body: { bytes_in: 1234, bytes_out: 4321 }
@@ -30,7 +29,6 @@
- do:
get:
index: test
- type: test
id: 1
- match: { _source.bytes_in: 1234 }
- match: { _source.bytes_out: 4321 }
@@ -72,7 +70,6 @@
- do:
index:
index: test
- type: test
id: 1
pipeline: "my_pipeline"
body: { bytes_in: 1234, bytes_out: 4321 }
@@ -80,7 +77,6 @@
- do:
get:
index: test
- type: test
id: 1
- match: { _source.bytes_in: 1234 }
- match: { _source.bytes_out: 4321 }
diff --git a/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java b/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java
index 72400a5705162..5ae9944429d21 100644
--- a/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java
+++ b/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java
@@ -141,7 +141,7 @@ public void testRecoverReplica() throws Exception {
if (randomBoolean()) {
flush(index, randomBoolean());
} else if (randomBoolean()) {
- performSyncedFlush(index, randomBoolean());
+ syncedFlush(index, randomBoolean());
}
}
ensureGreen(index);
diff --git a/release-notes/opensearch.release-notes-1.2.4.md b/release-notes/opensearch.release-notes-1.2.4.md
new file mode 100644
index 0000000000000..dc2852a102c44
--- /dev/null
+++ b/release-notes/opensearch.release-notes-1.2.4.md
@@ -0,0 +1,76 @@
+## 2022-01-18 Version 1.2.4
+
+* __Update FIPS API libraries of Bouncy Castle (#1853) (#1888)__
+
+ [Tianli Feng](mailto:ftl94@live.com) - Thu, 13 Jan 2022 10:48:38 -0500
+
HEAD -> refs/heads/1.2, tag: refs/tags/1.2.4, refs/remotes/upstream/1.2
+ * Update bc-fips to 1.0.2.1
+ Signed-off-by: Tianli Feng <ftl94@live.com>
+
+ * Update bcpg-fips to 1.0.5.1
+ Signed-off-by: Tianli Feng <ftl94@live.com>
+
+ * Update bctls-fips to 1.0.12.2
+ Signed-off-by: Tianli Feng <ftl94@live.com>
+
+ * Use the unified bouncycastle version for bcpkix-jdk15on in HDFS testing
+ fixture
+ Signed-off-by: Tianli Feng <ftl94@live.com>
+
+* __[Backport 1.2] Replace JCenter with Maven Central. (#1057) and update plugin repository order. (#1894)__
+
+ [Marc Handalian](mailto:handalm@amazon.com) - Wed, 12 Jan 2022 15:18:22 -0800
+
+
+ * Replace JCenter with Maven Central. (#1057)
+ On February 3 2021, JFrog
+ [announced](https://jfrog.com/blog/into-the-sunset-bintray-jcenter-gocenter-and-chartcenter/)
+ the shutdown of JCenter. Later on April 27 2021, an update was provided that
the repository would become read-only and new packages and versions would no
longer be accepted on JCenter. This means we should no longer use JCenter for our
+ central artifacts repository.
+ This change replaces JCenter with Maven Central as per the Gradle
+ recommendation - https://blog.gradle.org/jcenter-shutdown
+ Signed-off-by: Rabi Panda <adnapibar@gmail.com>
+
+ * Define plugin repositories order in settings.gradle.
+ Signed-off-by: Marc Handalian <handalm@amazon.com>
+ Co-authored-by: Rabi Panda <adnapibar@gmail.com>
+
+* __Updating Netty to 4.1.72.Final (#1831) (#1890)__
+
+ [Sarat Vemulapalli](mailto:vemulapallisarat@gmail.com) - Wed, 12 Jan 2022 08:29:25 -0800
+
+
+ Signed-off-by: Sarat Vemulapalli <vemulapallisarat@gmail.com>
+
+* __Upgrading bouncycastle to 1.70 (#1832) (#1889)__
+
+ [Sarat Vemulapalli](mailto:vemulapallisarat@gmail.com) - Tue, 11 Jan 2022 17:20:29 -0800
+
+
+ Signed-off-by: Sarat Vemulapalli <vemulapallisarat@gmail.com>
+
+* __RestIntegTestTask fails because of missed log4j-core dependency (#1815) (#1819)__
+
+ [Andriy Redko](mailto:andriy.redko@aiven.io) - Tue, 28 Dec 2021 17:47:13 -0500
+
+
+ Signed-off-by: Andriy Redko <andriy.redko@aiven.io>
+
+* __Update to log4j 2.17.1 (#1820) (#1823)__
+
+ [Andriy Redko](mailto:andriy.redko@aiven.io) - Tue, 28 Dec 2021 17:46:53 -0500
+
+
+ Signed-off-by: Andriy Redko <andriy.redko@aiven.io>
+
+* __Prepare for next development iteration, 1.2.4. (#1792)__
+
+ [Daniel Doubrovkine (dB.)](mailto:dblock@dblock.org) - Wed, 22 Dec 2021 15:11:11 -0800
+
+
+ Signed-off-by: dblock <dblock@amazon.com>
+
+
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json
index 9f6461b16d3eb..8cdb3db7c12cd 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json
@@ -27,27 +27,6 @@
"description":"A comma-separated list of indices to restrict the results"
}
}
- },
- {
- "path":"/{index}/{type}/_count",
- "methods":[
- "POST",
- "GET"
- ],
- "parts":{
- "index":{
- "type":"list",
- "description":"A comma-separated list of indices to restrict the results"
- },
- "type": {
- "type" : "list",
- "description" : "A comma-separated list of types to restrict the results"
- }
- },
- "deprecated": {
- "version" : "7.0.0",
- "description" : "Specifying types in urls has been deprecated"
- }
}
]
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json b/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json
index 09042376a256b..fd221b474a070 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json
@@ -22,31 +22,6 @@
"description":"The name of the index"
}
}
- },
- {
- "path":"/{index}/{type}/{id}",
- "methods":[
- "HEAD"
- ],
- "parts":{
- "id":{
- "type":"string",
- "description":"The document ID"
- },
- "index":{
- "type":"string",
- "description":"The name of the index"
- },
- "type":{
- "type":"string",
- "description":"The type of the document (use `_all` to fetch the first document matching the ID across all types)",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get.json
index 0c8d62d6d1d34..2ce77f17aff10 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/get.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get.json
@@ -22,31 +22,6 @@
"description":"The name of the index"
}
}
- },
- {
- "path":"/{index}/{type}/{id}",
- "methods":[
- "GET"
- ],
- "parts":{
- "id":{
- "type":"string",
- "description":"The document ID"
- },
- "index":{
- "type":"string",
- "description":"The name of the index"
- },
- "type":{
- "type":"string",
- "description":"The type of the document (use `_all` to fetch the first document matching the ID across all types)",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json
index e5336059d3924..ad79678388590 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json
@@ -22,31 +22,6 @@
"description":"The name of the index"
}
}
- },
- {
- "path":"/{index}/{type}/{id}/_source",
- "methods":[
- "GET"
- ],
- "parts":{
- "id":{
- "type":"string",
- "description":"The document ID"
- },
- "index":{
- "type":"string",
- "description":"The name of the index"
- },
- "type":{
- "type":"string",
- "description":"The type of the document; deprecated and optional starting with 7.0",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json
index 37f3cc9f9f82b..b4865403331b0 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json
@@ -35,53 +35,6 @@
"description":"The name of the index"
}
}
- },
- {
- "path":"/{index}/{type}",
- "methods":[
- "POST"
- ],
- "parts":{
- "index":{
- "type":"string",
- "description":"The name of the index"
- },
- "type":{
- "type":"string",
- "description":"The type of the document",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
- },
- {
- "path":"/{index}/{type}/{id}",
- "methods":[
- "PUT",
- "POST"
- ],
- "parts":{
- "id":{
- "type":"string",
- "description":"Document ID"
- },
- "index":{
- "type":"string",
- "description":"The name of the index"
- },
- "type":{
- "type":"string",
- "description":"The type of the document",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.flush_synced.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.flush_synced.json
deleted file mode 100644
index 134ba93395b40..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.flush_synced.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "indices.flush_synced":{
- "documentation":{
- "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-synced-flush-api.html",
- "description":"Performs a synced flush operation on one or more indices. Synced flush is deprecated and will be removed in 8.0. Use flush instead"
- },
- "stability":"stable",
- "url":{
- "paths":[
- {
- "path":"/_flush/synced",
- "methods":[
- "POST",
- "GET"
- ],
- "deprecated":{
- "version":"7.6.0",
- "description":"Synced flush is deprecated and will be removed in 8.0. Use flush instead."
- }
- },
- {
- "path":"/{index}/_flush/synced",
- "methods":[
- "POST",
- "GET"
- ],
- "parts":{
- "index":{
- "type":"list",
- "description":"A comma-separated list of index names; use `_all` or empty string for all indices"
- }
- },
- "deprecated":{
- "version":"7.6.0",
- "description":"Synced flush is deprecated and will be removed in 8.0. Use flush instead."
- }
- }
- ]
- },
- "params":{
- "ignore_unavailable":{
- "type":"boolean",
- "description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)"
- },
- "allow_no_indices":{
- "type":"boolean",
- "description":"Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)"
- },
- "expand_wildcards":{
- "type":"enum",
- "options":[
- "open",
- "closed",
- "none",
- "all"
- ],
- "default":"open",
- "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both."
- }
- }
- }
-}
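With indices.flush_synced deleted outright, callers fall back to the regular flush API, which is the replacement the removed spec's own deprecation text points to. A minimal sketch of the equivalent request, assuming the plain `/{index}/_flush` path, a local cluster on localhost:9200, and the Python requests library:

import requests

# POST /{index}/_flush replaces the removed POST /{index}/_flush/synced.
resp = requests.post("http://localhost:9200/test_index/_flush")
print(resp.json()["_shards"])
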
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json
index 15cc48a582cc4..ee96dfcc21ccd 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json
@@ -34,52 +34,6 @@
"description":"A comma-separated list of fields"
}
}
- },
- {
- "path":"/_mapping/{type}/field/{fields}",
- "methods":[
- "GET"
- ],
- "parts":{
- "type":{
- "type":"list",
- "description":"A comma-separated list of document types",
- "deprecated":true
- },
- "fields":{
- "type":"list",
- "description":"A comma-separated list of fields"
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
- },
- {
- "path":"/{index}/_mapping/{type}/field/{fields}",
- "methods":[
- "GET"
- ],
- "parts":{
- "index":{
- "type":"list",
- "description":"A comma-separated list of index names"
- },
- "type":{
- "type":"list",
- "description":"A comma-separated list of document types",
- "deprecated":true
- },
- "fields":{
- "type":"list",
- "description":"A comma-separated list of fields"
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json
index 1d9e795c6ed5d..24fd668069697 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json
@@ -24,52 +24,10 @@
"description":"A comma-separated list of index names"
}
}
- },
- {
- "path":"/_mapping/{type}",
- "methods":[
- "GET"
- ],
- "parts":{
- "type":{
- "type":"list",
- "description":"A comma-separated list of document types",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
- },
- {
- "path":"/{index}/_mapping/{type}",
- "methods":[
- "GET"
- ],
- "parts":{
- "index":{
- "type":"list",
- "description":"A comma-separated list of index names"
- },
- "type":{
- "type":"list",
- "description":"A comma-separated list of document types",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
"params":{
- "include_type_name":{
- "type":"boolean",
- "description":"Whether to add the type name to the response (default: false)"
- },
"ignore_unavailable":{
"type":"boolean",
"description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json
index f23380ac2f1ac..451cbccd8d329 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json
@@ -19,155 +19,10 @@
"description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices."
}
}
- },
- {
- "path":"/{index}/{type}/_mapping",
- "methods":[
- "PUT",
- "POST"
- ],
- "parts":{
- "index":{
- "type":"list",
- "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices."
- },
- "type":{
- "type":"string",
- "description":"The name of the document type",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
- },
- {
- "path":"/{index}/_mapping/{type}",
- "methods":[
- "PUT",
- "POST"
- ],
- "parts":{
- "index":{
- "type":"list",
- "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices."
- },
- "type":{
- "type":"string",
- "description":"The name of the document type",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
- },
- {
- "path":"/{index}/{type}/_mappings",
- "methods":[
- "PUT",
- "POST"
- ],
- "parts":{
- "index":{
- "type":"list",
- "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices."
- },
- "type":{
- "type":"string",
- "description":"The name of the document type",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
- },
- {
- "path":"/{index}/_mappings/{type}",
- "methods":[
- "PUT",
- "POST"
- ],
- "parts":{
- "index":{
- "type":"list",
- "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices."
- },
- "type":{
- "type":"string",
- "description":"The name of the document type",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
- },
- {
- "path":"/_mappings/{type}",
- "methods":[
- "PUT",
- "POST"
- ],
- "parts":{
- "type":{
- "type":"string",
- "description":"The name of the document type",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
- },
- {
- "path":"/{index}/_mappings",
- "methods":[
- "PUT",
- "POST"
- ],
- "parts":{
- "index":{
- "type":"list",
- "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices."
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"The plural mappings is accepted but only /_mapping is documented"
- }
- },
- {
- "path":"/_mapping/{type}",
- "methods":[
- "PUT",
- "POST"
- ],
- "parts":{
- "type":{
- "type":"string",
- "description":"The name of the document type",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
"params":{
- "include_type_name":{
- "type":"boolean",
- "description":"Whether a type should be expected in the body of the mappings."
- },
"timeout":{
"type":"time",
"description":"Explicit operation timeout"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json b/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json
index f1d35aee7d62f..e0b58139ed684 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json
@@ -26,28 +26,6 @@
"description":"The name of the index"
}
}
- },
- {
- "path":"/{index}/{type}/_mget",
- "methods":[
- "GET",
- "POST"
- ],
- "parts":{
- "index":{
- "type":"string",
- "description":"The name of the index"
- },
- "type":{
- "type":"string",
- "description":"The type of the document",
- "deprecated":true
- }
- },
- "deprecated":{
- "version":"7.0.0",
- "description":"Specifying types in urls has been deprecated"
- }
}
]
},
@@ -86,7 +64,7 @@
}
},
"body":{
- "description":"Document identifiers; can be either `docs` (containing full document information) or `ids` (when index and type is provided in the URL.",
+ "description":"Document identifiers; can be either `docs` (containing full document information) or `ids` (when index is provided in the URL.",
"required":true
}
}
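As the amended body description says, when the index is given in the URL the body can be a bare list of `ids`, so the typed `/{index}/{type}/_mget` route is no longer needed. A minimal sketch, assuming a local cluster on localhost:9200 and the Python requests library:

import requests

# POST /{index}/_mget with an "ids" body; the index comes from the URL.
resp = requests.post("http://localhost:9200/test_1/_mget",
                     json={"ids": ["1", "2"]})
for doc in resp.json()["docs"]:
    print(doc["_id"], doc["found"])
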
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml
index 59612af74616c..8c8c6d50abf41 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml
@@ -1,10 +1,5 @@
---
"Array of objects":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
bulk:
refresh: true
@@ -28,11 +23,6 @@
---
"Empty _id":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
bulk:
refresh: true
@@ -107,12 +97,8 @@
---
"empty action":
-
- skip:
- version: " - 6.99.99"
- features: headers
- reason: types are required in requests before 7.0.0
-
+ features: headers
- do:
catch: /Malformed action\/metadata line \[3\], expected FIELD_NAME but found \[END_OBJECT\]/
headers:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml
deleted file mode 100644
index 6bebed7bc1dd0..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml
+++ /dev/null
@@ -1,120 +0,0 @@
----
-"Array of objects":
- - do:
- bulk:
- refresh: true
- body:
- - index:
- _index: test_index
- _type: test_type
- _id: test_id
- - f1: v1
- f2: 42
- - index:
- _index: test_index
- _type: test_type
- _id: test_id2
- - f1: v2
- f2: 47
-
- - do:
- count:
- index: test_index
-
- - match: {count: 2}
-
----
-"Empty _id":
- - do:
- bulk:
- refresh: true
- body:
- - index:
- _index: test
- _type: type
- _id: ''
- - f: 1
- - index:
- _index: test
- _type: type
- _id: id
- - f: 2
- - index:
- _index: test
- _type: type
- - f: 3
- - match: { errors: true }
- - match: { items.0.index.status: 400 }
- - match: { items.0.index.error.type: illegal_argument_exception }
- - match: { items.0.index.error.reason: if _id is specified it must not be empty }
- - match: { items.1.index.result: created }
- - match: { items.2.index.result: created }
-
- - do:
- count:
- index: test
-
- - match: { count: 2 }
-
----
-"Empty _id with op_type create":
- - skip:
- version: " - 7.4.99"
- reason: "auto id + op type create only supported since 7.5"
-
- - do:
- bulk:
- refresh: true
- body:
- - index:
- _index: test
- _type: type
- _id: ''
- - f: 1
- - index:
- _index: test
- _type: type
- _id: id
- - f: 2
- - index:
- _index: test
- _type: type
- - f: 3
- - create:
- _index: test
- _type: type
- - f: 4
- - index:
- _index: test
- _type: type
- op_type: create
- - f: 5
- - match: { errors: true }
- - match: { items.0.index.status: 400 }
- - match: { items.0.index.error.type: illegal_argument_exception }
- - match: { items.0.index.error.reason: if _id is specified it must not be empty }
- - match: { items.1.index.result: created }
- - match: { items.2.index.result: created }
- - match: { items.3.create.result: created }
- - match: { items.4.create.result: created }
-
- - do:
- count:
- index: test
-
- - match: { count: 4 }
-
----
-"empty action":
- - skip:
- features: headers
-
- - do:
- catch: /Malformed action\/metadata line \[3\], expected FIELD_NAME but found \[END_OBJECT\]/
- headers:
- Content-Type: application/json
- bulk:
- body: |
- {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}}
- {"f1": "v1", "f2": 42}
- {}
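The deleted 11_basic_with_types.yml suite exercised bulk action lines carrying `_type`; the surviving tests use the same NDJSON format with only `_index` and `_id` in the action metadata. A minimal sketch of such a typeless bulk request, assuming a local cluster on localhost:9200 and the Python requests library:

import requests

# Bulk NDJSON without _type: each action line is followed by its source line.
body = (
    '{"index": {"_index": "test_index", "_id": "test_id"}}\n'
    '{"f1": "v1", "f2": 42}\n'
    '{"index": {"_index": "test_index", "_id": "test_id2"}}\n'
    '{"f1": "v2", "f2": 47}\n'
)
resp = requests.post("http://localhost:9200/_bulk?refresh=true",
                     data=body,
                     headers={"Content-Type": "application/x-ndjson"})
print(resp.json()["errors"])   # False when every item succeeded
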
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml
index b23517f6a8f25..3d956dce54289 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml
@@ -1,9 +1,5 @@
---
"List of strings":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
bulk:
refresh: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml
deleted file mode 100644
index def91f4280722..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml
+++ /dev/null
@@ -1,17 +0,0 @@
----
-"List of strings":
- - do:
- bulk:
- refresh: true
- body:
- - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}}'
- - '{"f1": "v1", "f2": 42}'
- - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}}'
- - '{"f1": "v2", "f2": 47}'
-
- - do:
- count:
- index: test_index
-
- - match: {count: 2}
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml
index 38706d133e44b..8b6467eeed975 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml
@@ -1,9 +1,5 @@
---
"One big string":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
bulk:
refresh: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml
deleted file mode 100644
index 1d117253c9b01..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml
+++ /dev/null
@@ -1,17 +0,0 @@
----
-"One big string":
- - do:
- bulk:
- refresh: true
- body: |
- {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}}
- {"f1": "v1", "f2": 42}
- {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}}
- {"f1": "v2", "f2": 47}
-
- - do:
- count:
- index: test_index
-
- - match: {count: 2}
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml
index 5e783d60d3d46..e29e84740ee5c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml
@@ -1,9 +1,5 @@
---
"Source filtering":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
index:
refresh: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/41_source_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/41_source_with_types.yml
deleted file mode 100644
index 3c8a86c13bdac..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/41_source_with_types.yml
+++ /dev/null
@@ -1,76 +0,0 @@
----
-"Source filtering":
- - do:
- index:
- refresh: true
- index: test_index
- type: test_type
- id: test_id_1
- body: { "foo": "bar", "bar": "foo" }
-
- - do:
- index:
- refresh: true
- index: test_index
- type: test_type
- id: test_id_2
- body: { "foo": "qux", "bar": "pux" }
-
- - do:
- index:
- refresh: true
- index: test_index
- type: test_type
- id: test_id_3
- body: { "foo": "corge", "bar": "forge" }
-
-
- - do:
- bulk:
- refresh: true
- body: |
- { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": true } }
- { "doc": { "foo": "baz" } }
- { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } }
- { "_source": true, "doc": { "foo": "quux" } }
-
- - match: { items.0.update.get._source.foo: baz }
- - match: { items.1.update.get._source.foo: quux }
-
- - do:
- bulk:
- index: test_index
- type: test_type
- _source: true
- body: |
- { "update": { "_id": "test_id_3" } }
- { "doc": { "foo": "garply" } }
-
- - match: { items.0.update.get._source.foo: garply }
-
- - do:
- bulk:
- refresh: true
- body: |
- { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": {"includes": "bar"} } }
- { "doc": { "foo": "baz" } }
- { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } }
- { "_source": {"includes": "foo"}, "doc": { "foo": "quux" } }
-
- - match: { items.0.update.get._source.bar: foo }
- - is_false: items.0.update.get._source.foo
- - match: { items.1.update.get._source.foo: quux }
- - is_false: items.1.update.get._source.bar
-
- - do:
- bulk:
- index: test_index
- type: test_type
- _source_includes: foo
- body: |
- { "update": { "_id": "test_id_3" } }
- { "doc": { "foo": "garply" } }
-
- - match: { items.0.update.get._source.foo: garply }
- - is_false: items.0.update.get._source.bar
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml
index 77098779c0c4f..34fc94691c21a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml
@@ -1,9 +1,5 @@
---
"refresh=true immediately makes changes are visible in search":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
bulk:
refresh: true
@@ -20,10 +16,6 @@
---
"refresh=empty string immediately makes changes are visible in search":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
bulk:
refresh: ""
@@ -41,10 +33,6 @@
---
"refresh=wait_for waits until changes are visible in search":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
bulk:
refresh: wait_for
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml
deleted file mode 100644
index 6326b9464caa0..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml
+++ /dev/null
@@ -1,48 +0,0 @@
----
-"refresh=true immediately makes changes are visible in search":
- - do:
- bulk:
- refresh: true
- body: |
- {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id1"}}
- {"f1": "v1", "f2": 42}
- {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id2"}}
- {"f1": "v2", "f2": 47}
-
- - do:
- count:
- index: bulk_50_refresh_1
- - match: {count: 2}
-
----
-"refresh=empty string immediately makes changes are visible in search":
- - do:
- bulk:
- refresh: ""
- body: |
- {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id3"}}
- {"f1": "v1", "f2": 42}
- {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id4"}}
- {"f1": "v2", "f2": 47}
-
- - do:
- count:
- index: bulk_50_refresh_2
- - match: {count: 2}
-
-
----
-"refresh=wait_for waits until changes are visible in search":
- - do:
- bulk:
- refresh: wait_for
- body: |
- {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id5"}}
- {"f1": "v1", "f2": 42}
- {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id6"}}
- {"f1": "v2", "f2": 47}
-
- - do:
- count:
- index: bulk_50_refresh_3
- - match: {count: 2}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/60_deprecated.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/60_deprecated.yml
index 1401fcc086208..8c8a840eb3f47 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/60_deprecated.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/60_deprecated.yml
@@ -1,12 +1,6 @@
---
"Deprecated parameters should fail in Bulk query":
-
- - skip:
- version: " - 6.99.99"
- reason: some parameters are removed starting from 7.0, their equivalents without underscore are used instead
- features: "warnings"
-
- do:
catch: bad_request
bulk:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/70_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/70_mix_typeless_typeful.yml
deleted file mode 100644
index cad0891b21e52..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/70_mix_typeless_typeful.yml
+++ /dev/null
@@ -1,35 +0,0 @@
----
-"bulk without types on an index that has types":
-
- - skip:
- version: " - 6.99.99"
- reason: Typeless APIs were introduced in 7.0.0
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "keyword"
- - do:
- bulk:
- refresh: true
- body:
- - index:
- _index: index
- _id: 0
- - foo: bar
- - index:
- _index: index
- _id: 1
- - foo: bar
-
- - do:
- count:
- index: index
-
- - match: {count: 2}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/80_cas.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/80_cas.yml
index 902621cfba578..87d3d237d42cb 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/80_cas.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/80_cas.yml
@@ -1,10 +1,5 @@
---
"Compare And Swap Sequence Numbers":
-
- - skip:
- version: " - 6.99.99"
- reason: typeless API are add in 7.0.0
-
- do:
index:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/81_cas_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/81_cas_with_types.yml
deleted file mode 100644
index 101316e7bf504..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/81_cas_with_types.yml
+++ /dev/null
@@ -1,45 +0,0 @@
----
-"Compare And Swap Sequence Numbers":
-
- - skip:
- version: " - 6.6.99"
- reason: cas operations with sequence numbers was added in 6.7
-
- - do:
- index:
- index: test_1
- type: _doc
- id: 1
- body: { foo: bar }
- - match: { _version: 1}
- - set: { _seq_no: seqno }
- - set: { _primary_term: primary_term }
-
- - do:
- bulk:
- body:
- - index:
- _index: test_1
- _type: _doc
- _id: 1
- if_seq_no: 10000
- if_primary_term: $primary_term
- - foo: bar2
-
- - match: { errors: true }
- - match: { items.0.index.status: 409 }
- - match: { items.0.index.error.type: version_conflict_engine_exception }
-
- - do:
- bulk:
- body:
- - index:
- _index: test_1
- _type: _doc
- _id: 1
- if_seq_no: $seqno
- if_primary_term: $primary_term
- - foo: bar2
-
- - match: { errors: false}
- - match: { items.0.index.status: 200 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml
index aa6c96202eaf4..db74e51cc2f91 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml
@@ -94,10 +94,6 @@
---
"cluster health basic test, one index with wait for no initializing shards":
- - skip:
- version: " - 6.1.99"
- reason: "wait_for_no_initializing_shards is introduced in 6.2.0"
-
- do:
indices.create:
index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml
index a40f4803ab0b8..a0432fa7aa558 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml
@@ -75,10 +75,6 @@
---
"get cluster stats returns discovery types":
- - skip:
- version: " - 6.99.99"
- reason: "discovery types are added for v7.0.0"
-
- do:
cluster.stats: {}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/count/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/count/11_basic_with_types.yml
deleted file mode 100644
index 48cfc610b435e..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/count/11_basic_with_types.yml
+++ /dev/null
@@ -1,66 +0,0 @@
-setup:
- - do:
- indices.create:
- index: test
- - do:
- index:
- index: test
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- indices.refresh:
- index: [test]
-
----
-"count with body":
- - do:
- count:
- index: test
- type: test
- body:
- query:
- match:
- foo: bar
-
- - match: {count : 1}
-
- - do:
- count:
- index: test
- body:
- query:
- match:
- foo: test
-
- - match: {count : 0}
-
----
-"count with empty body":
-# empty body should default to match_all query
- - do:
- count:
- index: test
- type: test
- body: { }
-
- - match: {count : 1}
-
- - do:
- count:
- index: test
- type: test
-
- - match: {count : 1}
-
----
-"count body without query element":
- - do:
- catch: bad_request
- count:
- index: test
- type: test
- body:
- match:
- foo: bar
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/11_with_id_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/11_with_id_with_types.yml
deleted file mode 100644
index 1e58c38c7b589..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/11_with_id_with_types.yml
+++ /dev/null
@@ -1,33 +0,0 @@
----
-"Create with ID":
- - do:
- create:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: "1"}
- - match: { _version: 1}
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
-
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: "1"}
- - match: { _version: 1}
- - match: { _source: { foo: bar }}
-
- - do:
- catch: conflict
- create:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/15_without_id_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/15_without_id_with_types.yml
deleted file mode 100644
index ab9932819381f..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/15_without_id_with_types.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-"Create without ID":
- - do:
- catch: param
- create:
- index: test_1
- type: test
- body: { foo: bar }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/36_external_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/36_external_version_with_types.yml
deleted file mode 100644
index cb8c041d7102c..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/36_external_version_with_types.yml
+++ /dev/null
@@ -1,30 +0,0 @@
----
-"External version":
-
- - do:
- catch: bad_request
- create:
- index: test
- type: test
- id: 1
- body: { foo: bar }
- version_type: external
- version: 0
-
- - match: { status: 400 }
- - match: { error.type: action_request_validation_exception }
- - match: { error.reason: "Validation Failed: 1: create operations only support internal versioning. use index instead;" }
-
- - do:
- catch: bad_request
- create:
- index: test
- type: test
- id: 2
- body: { foo: bar }
- version_type: external
- version: 5
-
- - match: { status: 400 }
- - match: { error.type: action_request_validation_exception }
- - match: { error.reason: "Validation Failed: 1: create operations only support internal versioning. use index instead;" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/41_routing_with_types.yml
deleted file mode 100644
index 752489f722c9e..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/41_routing_with_types.yml
+++ /dev/null
@@ -1,43 +0,0 @@
----
-"Routing":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index:
- number_of_shards: 5
- number_of_routing_shards: 5
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- create:
- index: test_1
- type: test
- id: 1
- routing: 5
- body: { foo: bar }
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- routing: 5
- stored_fields: [_routing]
-
- - match: { _id: "1"}
- - match: { _routing: "5"}
-
- - do:
- catch: missing
- get:
- index: test_1
- type: test
- id: 1
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/61_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/61_refresh_with_types.yml
deleted file mode 100644
index e24bdf4260340..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/61_refresh_with_types.yml
+++ /dev/null
@@ -1,82 +0,0 @@
----
-"Refresh":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index.refresh_interval: -1
- number_of_replicas: 0
- - do:
- create:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { term: { _id: 1 }}
-
- - match: { hits.total: 0 }
-
- - do:
- create:
- index: test_1
- type: test
- id: 2
- refresh: true
- body: { foo: bar }
- - is_true: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { term: { _id: 2 }}
-
- - match: { hits.total: 1 }
-
----
-"When refresh url parameter is an empty string that means \"refresh immediately\"":
- - do:
- create:
- index: test_1
- type: test
- id: 1
- refresh: ""
- body: { foo: bar }
- - is_true: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { term: { _id: 1 }}
-
- - match: { hits.total: 1 }
-
----
-"refresh=wait_for waits until changes are visible in search":
- - do:
- index:
- index: create_60_refresh_1
- type: test
- id: create_60_refresh_id1
- body: { foo: bar }
- refresh: wait_for
- - is_false: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: create_60_refresh_1
- body:
- query: { term: { _id: create_60_refresh_id1 }}
- - match: { hits.total: 1 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml
index e6d2413f16788..6b4e7ccc48ca6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml
@@ -1,8 +1,5 @@
---
setup:
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
indices.create:
index: test_1
@@ -16,9 +13,6 @@ setup:
---
"Indexing a doc with No. nested objects less or equal to index.mapping.nested_objects.limit should succeed":
- - skip:
- version: " - 6.99.99"
- reason: index.mapping.nested_objects setting has been added in 7.0.0
- do:
create:
index: test_1
@@ -29,9 +23,6 @@ setup:
---
"Indexing a doc with No. nested objects more than index.mapping.nested_objects.limit should fail":
- - skip:
- version: " - 6.99.99"
- reason: index.mapping.nested_objects setting has been added in 7.0.0
- do:
catch: /The number of nested documents has exceeded the allowed limit of \[2\]. This limit can be set by changing the \[index.mapping.nested_objects.limit\] index level setting\./
create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/71_nested_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/71_nested_with_types.yml
deleted file mode 100644
index 755aaca448b0b..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/71_nested_with_types.yml
+++ /dev/null
@@ -1,42 +0,0 @@
----
-setup:
- - do:
- indices.create:
- include_type_name: true
- index: test_1
- body:
- settings:
- index.mapping.nested_objects.limit: 2
- mappings:
- test_type:
- properties:
- nested1:
- type: nested
-
----
-"Indexing a doc with No. nested objects less or equal to index.mapping.nested_objects.limit should succeed":
- - skip:
- version: " - 6.99.99"
- reason: index.mapping.nested_objects setting has been added in 7.0.0
- - do:
- create:
- index: test_1
- type: test_type
- id: 1
- body:
- "nested1" : [ { "foo": "bar" }, { "foo": "bar2" } ]
- - match: { _version: 1}
-
----
-"Indexing a doc with No. nested objects more than index.mapping.nested_objects.limit should fail":
- - skip:
- version: " - 6.99.99"
- reason: index.mapping.nested_objects setting has been added in 7.0.0
- - do:
- catch: /The number of nested documents has exceeded the allowed limit of \[2\]. This limit can be set by changing the \[index.mapping.nested_objects.limit\] index level setting\./
- create:
- index: test_1
- type: test_type
- id: 1
- body:
- "nested1" : [ { "foo": "bar" }, { "foo": "bar2" }, { "foo": "bar3" } ]
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml
index 3fc10bc8db12d..6a2f852b221c2 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml
@@ -29,7 +29,6 @@
id: 1
- match: { _index: foobar }
- - match: { _type: _doc }
- match: { _id: "1"}
- match: { _version: 2}
- match: { _shards.total: 1}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/13_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/13_basic_with_types.yml
deleted file mode 100644
index a3671d5ac24b0..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/13_basic_with_types.yml
+++ /dev/null
@@ -1,19 +0,0 @@
----
-"Basic":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - match: { _version: 1 }
-
- - do:
- delete:
- index: test_1
- type: test
- id: 1
-
- - match: { _version: 2 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/14_shard_header_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/14_shard_header_with_types.yml
deleted file mode 100644
index d1bb4c0df347d..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/14_shard_header_with_types.yml
+++ /dev/null
@@ -1,36 +0,0 @@
----
-"Delete check shard header":
-
- - do:
- indices.create:
- index: foobar
- body:
- settings:
- number_of_shards: "1"
- number_of_replicas: "0"
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: foobar
- type: baz
- id: 1
- body: { foo: bar }
-
- - do:
- delete:
- index: foobar
- type: baz
- id: 1
-
- - match: { _index: foobar }
- - match: { _type: baz }
- - match: { _id: "1"}
- - match: { _version: 2}
- - match: { _shards.total: 1}
- - match: { _shards.successful: 1}
- - match: { _shards.failed: 0}
- - is_false: _shards.pending
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/15_result_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/15_result_with_types.yml
deleted file mode 100644
index d01e88be8ad0b..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/15_result_with_types.yml
+++ /dev/null
@@ -1,26 +0,0 @@
----
-"Delete result field":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- delete:
- index: test_1
- type: test
- id: 1
-
- - match: { result: deleted }
-
- - do:
- catch: missing
- delete:
- index: test_1
- type: test
- id: 1
-
- - match: { result: not_found }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_cas_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_cas_with_types.yml
deleted file mode 100644
index ef352a9bad6b1..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_cas_with_types.yml
+++ /dev/null
@@ -1,30 +0,0 @@
----
-"Internal version":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - match: { _seq_no: 0 }
-
- - do:
- catch: conflict
- delete:
- index: test_1
- type: test
- id: 1
- if_seq_no: 2
- if_primary_term: 1
-
- - do:
- delete:
- index: test_1
- type: test
- id: 1
- if_seq_no: 0
- if_primary_term: 1
-
- - match: { _seq_no: 1 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/27_external_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/27_external_version_with_types.yml
deleted file mode 100644
index 453d64d85bbc1..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/27_external_version_with_types.yml
+++ /dev/null
@@ -1,32 +0,0 @@
----
-"External version":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- version_type: external
- version: 5
-
- - match: { _version: 5}
-
- - do:
- catch: conflict
- delete:
- index: test_1
- type: test
- id: 1
- version_type: external
- version: 4
-
- - do:
- delete:
- index: test_1
- type: test
- id: 1
- version_type: external
- version: 6
-
- - match: { _version: 6}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/28_external_gte_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/28_external_gte_version_with_types.yml
deleted file mode 100644
index 70f78c17faa63..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/28_external_gte_version_with_types.yml
+++ /dev/null
@@ -1,53 +0,0 @@
----
-"External GTE version":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- version_type: external_gte
- version: 5
-
- - match: { _version: 5}
-
- - do:
- catch: conflict
- delete:
- index: test_1
- type: test
- id: 1
- version_type: external_gte
- version: 4
-
- - do:
- delete:
- index: test_1
- type: test
- id: 1
- version_type: external_gte
- version: 6
-
- - match: { _version: 6}
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- version_type: external_gte
- version: 6
-
- - match: { _version: 6}
-
- - do:
- delete:
- index: test_1
- type: test
- id: 1
- version_type: external_gte
- version: 6
-
- - match: { _version: 6}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/31_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/31_routing_with_types.yml
deleted file mode 100644
index 6f67b3a03f401..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/31_routing_with_types.yml
+++ /dev/null
@@ -1,32 +0,0 @@
----
-"Routing":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- number_of_shards: 5
- - do:
- index:
- index: test_1
- type: test
- id: 1
- routing: 5
- body: { foo: bar }
-
- - do:
- catch: missing
- delete:
- index: test_1
- type: test
- id: 1
- routing: 4
-
- - do:
- delete:
- index: test_1
- type: test
- id: 1
- routing: 5
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/51_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/51_refresh_with_types.yml
deleted file mode 100644
index a901c1033f7c0..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/51_refresh_with_types.yml
+++ /dev/null
@@ -1,148 +0,0 @@
----
-"Refresh":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- refresh_interval: -1
- number_of_shards: 5
- number_of_routing_shards: 5
- number_of_replicas: 0
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- refresh: true
-
-# If you wonder why this document get 3 as an id instead of 2, it is because the
-# current routing algorithm would route 1 and 2 to the same shard while we need
-# them to be different for this test to pass
- - do:
- index:
- index: test_1
- type: test
- id: 3
- body: { foo: bar }
- refresh: true
- - is_true: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { terms: { _id: [1,3] }}
-
- - match: { hits.total: 2 }
-
- - do:
- delete:
- index: test_1
- type: test
- id: 1
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { terms: { _id: [1,3] }}
-
- - match: { hits.total: 2 }
-
- - do:
- delete:
- index: test_1
- type: test
- id: 3
- refresh: true
-
-# If a replica shard where doc 1 is located gets initialized at this point, doc 1
-# won't be found by the following search as the shard gets automatically refreshed
-# right before getting started. This is why this test only works with 0 replicas.
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { terms: { _id: [1,3] }}
-
- - match: { hits.total: 1 }
-
----
-"When refresh url parameter is an empty string that means \"refresh immediately\"":
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- refresh: true
- - is_true: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { term: { _id: 1 }}
- - match: { hits.total: 1 }
-
- - do:
- delete:
- index: test_1
- type: test
- id: 1
- refresh: ""
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { term: { _id: 1 }}
- - match: { hits.total: 0 }
-
----
-"refresh=wait_for waits until changes are visible in search":
- - do:
- index:
- index: delete_50_refresh_1
- type: test
- id: delete_50_refresh_id1
- body: { foo: bar }
- refresh: true
- - is_true: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: delete_50_refresh_1
- body:
- query: { term: { _id: delete_50_refresh_id1 }}
- - match: { hits.total: 1 }
-
- - do:
- delete:
- index: delete_50_refresh_1
- type: test
- id: delete_50_refresh_id1
- refresh: wait_for
- - is_false: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: delete_50_refresh_1
- body:
- query: { term: { _id: delete_50_refresh_id1 }}
- - match: { hits.total: 0 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/61_missing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/61_missing_with_types.yml
deleted file mode 100644
index 9cfdb48ae20aa..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/61_missing_with_types.yml
+++ /dev/null
@@ -1,19 +0,0 @@
----
-"Missing document with catch":
-
- - do:
- catch: missing
- delete:
- index: test_1
- type: test
- id: 1
-
----
-"Missing document with ignore":
-
- - do:
- delete:
- index: test_1
- type: test
- id: 1
- ignore: 404
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/70_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/70_mix_typeless_typeful.yml
deleted file mode 100644
index e0f20795e41ca..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/70_mix_typeless_typeful.yml
+++ /dev/null
@@ -1,43 +0,0 @@
----
-"DELETE with typeless API on an index that has types":
-
- - skip:
- version: " - 6.99.99"
- reason: Typeless APIs were introduced in 7.0.0
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "keyword"
-
- - do:
- index:
- index: index
- type: not_doc
- id: 1
- body: { foo: bar }
-
- - do:
- catch: bad_request
- delete:
- index: index
- type: some_random_type
- id: 1
-
- - match: { error.root_cause.0.reason: "/Rejecting.mapping.update.to.\\[index\\].as.the.final.mapping.would.have.more.than.1.type.*/" }
-
- - do:
- delete:
- index: index
- id: 1
-
- - match: { _index: "index" }
- - match: { _type: "_doc" }
- - match: { _id: "1"}
- - match: { _version: 2}
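As the final step of the deleted mixed typeless/typeful test shows, a delete no longer carries a type and resolves to the `_doc` endpoint. A minimal sketch, assuming the `/{index}/_doc/{id}` path, a local cluster on localhost:9200, and the Python requests library:

import requests

# DELETE /{index}/_doc/{id}; on a missing document the 404 body still
# reports result: "not_found".
resp = requests.delete("http://localhost:9200/index/_doc/1")
print(resp.status_code, resp.json().get("result"))
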
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml
index 1ab90e3efa83f..84f5fa67590e6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml
@@ -1,9 +1,5 @@
---
"Basic":
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
exists:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/11_basic_with_types.yml
deleted file mode 100644
index 7e4e26b6b1c1c..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/11_basic_with_types.yml
+++ /dev/null
@@ -1,36 +0,0 @@
----
-"Basic":
-
- - do:
- exists:
- index: test_1
- type: test
- id: 1
-
- - is_false: ''
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { "foo": "bar" }
-
- - is_true: ''
-
- - do:
- exists:
- index: test_1
- type: test
- id: 1
-
- - is_true: ''
-
- - do:
- exists:
- index: test_1
- type: test
- id: 1
- version: 1
-
- - is_true: ''
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/41_routing_with_types.yml
deleted file mode 100644
index 25315628d7ece..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/41_routing_with_types.yml
+++ /dev/null
@@ -1,41 +0,0 @@
----
-"Routing":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index:
- number_of_shards: 5
- number_of_routing_shards: 5
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- routing: 5
- body: { foo: bar }
-
- - do:
- exists:
- index: test_1
- type: test
- id: 1
- routing: 5
-
- - is_true: ''
-
- - do:
- exists:
- index: test_1
- type: test
- id: 1
-
- - is_false: ''
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/61_realtime_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/61_realtime_refresh_with_types.yml
deleted file mode 100644
index df8c697e4a1fb..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/61_realtime_refresh_with_types.yml
+++ /dev/null
@@ -1,50 +0,0 @@
----
-"Realtime Refresh":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index:
- refresh_interval: -1
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- exists:
- index: test_1
- type: test
- id: 1
- realtime: false
-
- - is_false: ''
-
- - do:
- exists:
- index: test_1
- type: test
- id: 1
- realtime: true
-
- - is_true: ''
-
- - do:
- exists:
- index: test_1
- type: test
- id: 1
- realtime: false
- refresh: true
-
- - is_true: ''
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml
index 6fabdd59820cf..24e296130e405 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml
@@ -1,9 +1,5 @@
---
"Client-side default type":
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
index:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/71_defaults_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/71_defaults_with_types.yml
deleted file mode 100644
index 2db28f6634bd6..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/71_defaults_with_types.yml
+++ /dev/null
@@ -1,17 +0,0 @@
----
-"Client-side default type":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { "foo": "bar" }
-
- - do:
- exists:
- index: test_1
- type: _all
- id: 1
-
- - is_true: ''
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml
index bfe8da8d91519..6933d28a8492e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml
@@ -1,8 +1,4 @@
setup:
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
indices.create:
index: test_1
@@ -34,7 +30,6 @@ setup:
- is_true: matched
- match: { explanation.value: 1 }
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _id: id_1 }
---
@@ -51,7 +46,6 @@ setup:
- is_true: matched
- match: { explanation.value: 1 }
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _id: id_1 }
---
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/11_basic_with_types.yml
deleted file mode 100644
index 5f211435ae976..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/11_basic_with_types.yml
+++ /dev/null
@@ -1,66 +0,0 @@
-setup:
- - do:
- indices.create:
- index: test_1
- body:
- aliases:
- alias_1:
- "filter" : { "term" : { "foo" : "bar"} }
-
- - do:
- index:
- index: test_1
- type: test
- id: id_1
- body: { foo: bar, title: howdy }
-
- - do:
- indices.refresh: {}
-
----
-"Basic explain":
-
- - do:
- explain:
- index: test_1
- type: test
- id: id_1
- body:
- query:
- match_all: {}
-
- - is_true: matched
- - match: { explanation.value: 1 }
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: id_1 }
-
----
-"Basic explain with alias":
-
- - do:
- explain:
- index: alias_1
- type: test
- id: id_1
- body:
- query:
- match_all: {}
-
- - is_true: matched
- - match: { explanation.value: 1 }
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: id_1 }
-
----
-"Explain body without query element":
- - do:
- catch: bad_request
- explain:
- index: test_1
- type: test
- id: id_1
- body:
- match_all: {}
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml
index ad596f980807b..3d2f42d31f4df 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml
@@ -1,9 +1,5 @@
---
"Source filtering":
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
index:
index: test_1
@@ -16,7 +12,6 @@
- do:
explain: { index: test_1, id: 1, _source: false, body: { query: { match_all: {}} } }
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _id: "1" }
- is_false: get._source
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/21_source_filtering_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/21_source_filtering_with_types.yml
deleted file mode 100644
index e13edf7be5046..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/21_source_filtering_with_types.yml
+++ /dev/null
@@ -1,44 +0,0 @@
----
-"Source filtering":
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 }
- - do:
- indices.refresh:
- index: test_1
-
- - do:
- explain: { index: test_1, type: test, id: 1, _source: false, body: { query: { match_all: {}} } }
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: "1" }
- - is_false: get._source
-
- - do:
- explain: { index: test_1, type: test, id: 1, _source: true, body: { query: { match_all: {}} } }
- - match: { get._source.include.field1: v1 }
-
- - do:
- explain: { index: test_1, type: test, id: 1, _source: include.field1, body: { query: { match_all: {}} } }
- - match: { get._source.include.field1: v1 }
- - is_false: get._source.include.field2
-
- - do:
- explain: { index: test_1, type: test, id: 1, _source_includes: include.field1, body: { query: { match_all: {}} } }
- - match: { get._source.include.field1: v1 }
- - is_false: get._source.include.field2
-
- - do:
- explain: { index: test_1, type: test, id: 1, _source_includes: "include.field1,include.field2", body: { query: { match_all: {}} } }
- - match: { get._source.include.field1: v1 }
- - match: { get._source.include.field2: v2 }
- - is_false: get._source.count
-
- - do:
- explain: { index: test_1, type: test, id: 1, _source_includes: include, _source_excludes: "*.field2", body: { query: { match_all: {}} } }
- - match: { get._source.include.field1: v1 }
- - is_false: get._source.include.field2
- - is_false: get._source.count
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml
index ac34d4c2495f2..5c9b391ded6b8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml
@@ -1,9 +1,5 @@
---
"explain with query_string parameters":
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
indices.create:
index: test
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/31_query_string_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/31_query_string_with_types.yml
deleted file mode 100644
index b6930688acf2d..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/31_query_string_with_types.yml
+++ /dev/null
@@ -1,71 +0,0 @@
----
-"explain with query_string parameters":
- - do:
- indices.create:
- include_type_name: true
- index: test
- body:
- mappings:
- test:
- properties:
- number:
- type: integer
-
- - do:
- index:
- index: test
- type: test
- id: 1
- body: { field: foo bar}
-
- - do:
- indices.refresh:
- index: [test]
-
- - do:
- explain:
- index: test
- type: test
- id: 1
- q: bar
- df: field
-
- - is_true: matched
-
- - do:
- explain:
- index: test
- type: test
- id: 1
- q: field:foo field:xyz
-
- - is_true: matched
-
- - do:
- explain:
- index: test
- type: test
- id: 1
- q: field:foo field:xyz
- default_operator: AND
-
- - is_false: matched
-
- - do:
- explain:
- index: test
- type: test
- id: 1
- q: field:BA*
-
- - is_true: matched
-
- - do:
- explain:
- index: test
- type: test
- id: 1
- q: number:foo
- lenient: true
-
- - is_false: matched
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/40_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/40_mix_typeless_typeful.yml
deleted file mode 100644
index 36fdbaa6b6f78..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/40_mix_typeless_typeful.yml
+++ /dev/null
@@ -1,57 +0,0 @@
----
-"Explain with typeless API on an index that has types":
-
- - skip:
- version: " - 6.99.99"
- reason: Typeless APIs were introduced in 7.0.0
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "keyword"
-
- - do:
- index:
- index: index
- type: not_doc
- id: 1
- body: { foo: bar }
-
- - do:
- indices.refresh: {}
-
- - do:
- catch: missing
- explain:
- index: index
- type: some_random_type
- id: 1
- body:
- query:
- match_all: {}
-
- - match: { _index: "index" }
- - match: { _type: "some_random_type" }
- - match: { _id: "1"}
- - match: { matched: false}
-
- - do:
- explain:
- index: index
- type: _doc #todo: make _explain typeless and remove this
- id: 1
- body:
- query:
- match_all: {}
-
- - match: { _index: "index" }
- - match: { _type: "_doc" }
- - match: { _id: "1"}
- - is_true: matched
- - match: { explanation.value: 1 }
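The typed explain suites are dropped in favour of the typeless `_explain` endpoint, assumed here to take the id directly on the path as `/{index}/_explain/{id}`. A minimal sketch, assuming a local cluster on localhost:9200, the Python requests library, and an existing document id_1 in test_1:

import requests

# POST /{index}/_explain/{id} with the query to score against the document.
resp = requests.post("http://localhost:9200/test_1/_explain/id_1",
                     json={"query": {"match_all": {}}})
body = resp.json()
print(body["matched"], body["explanation"]["value"])
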
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml
index d125efa73011c..f1ae5c89e52a5 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml
@@ -149,10 +149,6 @@ setup:
- is_false: fields.geo.keyword.on_aggregatable_indices
---
"Get date_nanos field caps":
- - skip:
- version: " - 6.99.99"
- reason: date_nanos field mapping type has been introcued in 7.0
-
- do:
indices.create:
include_type_name: false
@@ -204,10 +200,6 @@ setup:
- is_false: fields.object\.nested2.keyword.non_searchable_indices
---
"Get object and nested field caps":
- - skip:
- version: " - 6.99.99"
- reason: object and nested fields are returned since 7.0
-
- do:
field_caps:
index: 'test1,test2,test3'
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/100_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/100_mix_typeless_typeful.yml
deleted file mode 100644
index d13229dbffbc6..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/100_mix_typeless_typeful.yml
+++ /dev/null
@@ -1,47 +0,0 @@
----
-"GET with typeless API on an index that has types":
-
- - skip:
- version: " - 6.99.99"
- reason: Typeless APIs were introduced in 7.0.0
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "keyword"
-
- - do:
- index:
- index: index
- type: not_doc
- id: 1
- body: { foo: bar }
-
- - do:
- catch: missing
- get:
- index: index
- type: some_random_type
- id: 1
-
- - match: { _index: "index" }
- - match: { _type: "some_random_type" }
- - match: { _id: "1"}
- - match: { found: false}
-
- - do:
- get:
- index: index
- id: 1
-
- - match: { _index: "index" }
- - match: { _type: "_doc" }
- - match: { _id: "1"}
- - match: { _version: 1}
- - match: { _source: { foo: bar }}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml
index 9183c70c29bce..822e96e405583 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml
@@ -1,10 +1,5 @@
---
"Basic":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
index:
index: test_1
@@ -17,6 +12,5 @@
id: 中文
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _id: 中文 }
- match: { _source: { foo: "Hello: 中文" } }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/11_basic_with_types.yml
deleted file mode 100644
index 0689f714d6416..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/11_basic_with_types.yml
+++ /dev/null
@@ -1,31 +0,0 @@
----
-"Basic":
-
- - do:
- index:
- index: test_1
- type: test
- id: 中文
- body: { "foo": "Hello: 中文" }
-
- - do:
- get:
- index: test_1
- type: test
- id: 中文
-
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: 中文 }
- - match: { _source: { foo: "Hello: 中文" } }
-
- - do:
- get:
- index: test_1
- type: _all
- id: 中文
-
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: 中文 }
- - match: { _source: { foo: "Hello: 中文" } }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml
index 67065270665cf..921397b238f51 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml
@@ -1,9 +1,5 @@
---
"Default values":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
index:
index: test_1
@@ -16,7 +12,6 @@
id: 1
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _id: '1' }
- match: { _source: { foo: "bar" } }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/16_default_values_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/16_default_values_with_types.yml
deleted file mode 100644
index 5e08112253ef0..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/16_default_values_with_types.yml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-"Default values":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { "foo": "bar" }
-
- - do:
- get:
- index: test_1
- type: _all
- id: 1
-
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: '1' }
- - match: { _source: { foo: "bar" } }
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml
index ab27842e4516e..23c7e5cbc90a6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml
@@ -1,9 +1,5 @@
---
"Stored fields":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
indices.create:
index: test_1
@@ -29,7 +25,6 @@
stored_fields: foo
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _id: '1' }
- match: { fields.foo: [bar] }
- is_false: _source
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/21_stored_fields_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/21_stored_fields_with_types.yml
deleted file mode 100644
index d1862fc0340d8..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/21_stored_fields_with_types.yml
+++ /dev/null
@@ -1,60 +0,0 @@
----
-"Stored fields":
-
- - do:
- indices.create:
- include_type_name: true
- index: test_1
- body:
- mappings:
- test:
- properties:
- foo:
- type: keyword
- store: true
- count:
- type: integer
- store: true
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { "foo": "bar", "count": 1 }
- - do:
- get:
- index: test_1
- type: test
- id: 1
- stored_fields: foo
-
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: '1' }
- - match: { fields.foo: [bar] }
- - is_false: _source
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- stored_fields: [foo, count]
-
- - match: { fields.foo: [bar] }
- - match: { fields.count: [1] }
- - is_false: _source
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- stored_fields: [foo, count, _source]
-
- - match: { fields.foo: [bar] }
- - match: { fields.count: [1] }
- - match: { _source.foo: bar }
-
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/41_routing_with_types.yml
deleted file mode 100644
index 276346cda4f98..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/41_routing_with_types.yml
+++ /dev/null
@@ -1,43 +0,0 @@
----
-"Routing":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index:
- number_of_shards: 5
- number_of_routing_shards: 5
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- routing: 5
- body: { foo: bar }
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- routing: 5
- stored_fields: [_routing]
-
- - match: { _id: "1"}
- - match: { _routing: "5"}
-
- - do:
- catch: missing
- get:
- index: test_1
- type: test
- id: 1
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml
index 38130cee59810..d79a3bd300da8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml
@@ -18,7 +18,6 @@
id: 1
- match: {_index: "test_1"}
- - match: { _type: _doc }
- match: {_id: "1"}
- match: {_version: 1}
- match: {found: true}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/51_with_headers_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/51_with_headers_with_types.yml
deleted file mode 100644
index b88dbaafc4fb2..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/51_with_headers_with_types.yml
+++ /dev/null
@@ -1,26 +0,0 @@
----
-"REST test with headers":
- - skip:
- features: ["headers", "yaml"]
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { "body": "foo" }
-
- - do:
- headers:
- Accept: application/yaml
- get:
- index: test_1
- type: _all
- id: 1
-
- - match: {_index: "test_1"}
- - match: {_type: "test"}
- - match: {_id: "1"}
- - match: {_version: 1}
- - match: {found: true}
- - match: { _source: { body: foo }}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/61_realtime_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/61_realtime_refresh_with_types.yml
deleted file mode 100644
index 7d02b4667efe7..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/61_realtime_refresh_with_types.yml
+++ /dev/null
@@ -1,49 +0,0 @@
----
-"Realtime Refresh":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index:
- refresh_interval: -1
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- catch: missing
- get:
- index: test_1
- type: test
- id: 1
- realtime: false
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- realtime: true
-
- - is_true: found
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- realtime: false
- refresh: true
-
- - is_true: found
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml
index f4a5ba39be3b8..8ef3ad708fc18 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml
@@ -1,9 +1,5 @@
---
"Source filtering":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
indices.create:
index: test_1
@@ -23,7 +19,6 @@
get: { index: test_1, id: 1, _source: false }
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _id: "1" }
- is_false: _source
@@ -62,7 +57,6 @@
_source: true
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _id: "1" }
- match: { fields.count: [1] }
- match: { _source.include.field1: v1 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/71_source_filtering_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/71_source_filtering_with_types.yml
deleted file mode 100644
index 3ac493c629f20..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/71_source_filtering_with_types.yml
+++ /dev/null
@@ -1,69 +0,0 @@
----
-"Source filtering":
-
- - do:
- indices.create:
- include_type_name: true
- index: test_1
- body:
- mappings:
- test:
- properties:
- count:
- type: integer
- store: true
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 }
- - do:
- get: { index: test_1, type: test, id: 1, _source: false }
-
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: "1" }
- - is_false: _source
-
- - do:
- get: { index: test_1, type: test, id: 1, _source: true }
- - match: { _source.include.field1: v1 }
-
- - do:
- get: { index: test_1, type: test, id: 1, _source: include.field1 }
- - match: { _source.include.field1: v1 }
- - is_false: _source.include.field2
-
- - do:
- get: { index: test_1, type: test, id: 1, _source_includes: include.field1 }
- - match: { _source.include.field1: v1 }
- - is_false: _source.include.field2
-
- - do:
- get: { index: test_1, type: test, id: 1, _source_includes: "include.field1,include.field2" }
- - match: { _source.include.field1: v1 }
- - match: { _source.include.field2: v2 }
- - is_false: _source.count
-
- - do:
- get: { index: test_1, type: test, id: 1, _source_includes: include, _source_excludes: "*.field2" }
- - match: { _source.include.field1: v1 }
- - is_false: _source.include.field2
- - is_false: _source.count
-
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- stored_fields: count
- _source: true
-
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: "1" }
- - match: { fields.count: [1] }
- - match: { _source.include.field1: v1 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml
index d7d8edfc65dcb..30efd759c1a65 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml
@@ -1,9 +1,5 @@
---
"Missing document with catch":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
catch: missing
get:
@@ -12,10 +8,6 @@
---
"Missing document with ignore":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
get:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/81_missing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/81_missing_with_types.yml
deleted file mode 100644
index a60d11388566d..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/81_missing_with_types.yml
+++ /dev/null
@@ -1,19 +0,0 @@
----
-"Missing document with catch":
-
- - do:
- catch: missing
- get:
- index: test_1
- type: test
- id: 1
-
----
-"Missing document with ignore":
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- ignore: 404
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/91_versions_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/91_versions_with_types.yml
deleted file mode 100644
index c6631b83b1867..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/91_versions_with_types.yml
+++ /dev/null
@@ -1,89 +0,0 @@
----
-"Versions":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- - match: { _version: 1}
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- - match: { _version: 2}
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- version: 2
- - match: { _id: "1" }
-
- - do:
- catch: conflict
- get:
- index: test_1
- type: test
- id: 1
- version: 1
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- version: 2
- version_type: external
- - match: { _id: "1" }
-
- - do:
- catch: conflict
- get:
- index: test_1
- type: test
- id: 1
- version: 10
- version_type: external
-
- - do:
- catch: conflict
- get:
- index: test_1
- type: test
- id: 1
- version: 1
- version_type: external
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- version: 2
- version_type: external_gte
- - match: { _id: "1" }
-
- - do:
- catch: conflict
- get:
- index: test_1
- type: test
- id: 1
- version: 10
- version_type: external_gte
-
- - do:
- catch: conflict
- get:
- index: test_1
- type: test
- id: 1
- version: 1
- version_type: external_gte
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml
index 6f81c430c883a..887e31f33d45e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml
@@ -1,10 +1,5 @@
---
"Basic":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
index:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/11_basic_with_types.yml
deleted file mode 100644
index 1446f569e86d8..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/11_basic_with_types.yml
+++ /dev/null
@@ -1,17 +0,0 @@
----
-"Basic with types":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { "foo": "bar" }
-
- - do:
- get_source:
- index: test_1
- type: test
- id: 1
-
- - match: { '': { foo: bar } }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml
index 57c11a1ca10e2..73fce7ce09bbf 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml
@@ -1,11 +1,5 @@
---
"Default values":
-
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
index:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/16_default_values_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/16_default_values_with_types.yml
deleted file mode 100644
index e2de7a9f0007c..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/16_default_values_with_types.yml
+++ /dev/null
@@ -1,16 +0,0 @@
----
-"Default values":
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { "foo": "bar" }
-
- - do:
- get_source:
- index: test_1
- type: test
- id: 1
-
- - match: { '': { foo: bar } }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/41_routing_with_types.yml
deleted file mode 100644
index db53a33ba597e..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/41_routing_with_types.yml
+++ /dev/null
@@ -1,42 +0,0 @@
----
-"Routing":
-
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index:
- number_of_shards: 5
- number_of_routing_shards: 5
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- routing: 5
- body: { foo: bar }
-
- - do:
- get_source:
- index: test_1
- type: test
- id: 1
- routing: 5
-
- - match: { '': {foo: bar}}
-
- - do:
- catch: missing
- get_source:
- index: test_1
- type: test
- id: 1
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/61_realtime_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/61_realtime_refresh_with_types.yml
deleted file mode 100644
index f5b406de28b4a..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/61_realtime_refresh_with_types.yml
+++ /dev/null
@@ -1,49 +0,0 @@
----
-"Realtime":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- refresh_interval: -1
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- catch: missing
- get_source:
- index: test_1
- type: test
- id: 1
- realtime: false
-
- - do:
- get_source:
- index: test_1
- type: test
- id: 1
- realtime: true
-
- - match: { '': {foo: bar}}
-
- - do:
- get_source:
- index: test_1
- type: test
- id: 1
- realtime: false
- refresh: true
-
- - match: { '': {foo: bar}}
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml
index 2665458cea95d..0836979fbf83a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml
@@ -1,11 +1,5 @@
---
"Source filtering":
-
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
index:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/71_source_filtering_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/71_source_filtering_with_types.yml
deleted file mode 100644
index b4f20fee53be2..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/71_source_filtering_with_types.yml
+++ /dev/null
@@ -1,27 +0,0 @@
----
-"Source filtering":
-
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 }
-
- - do:
- get_source: { index: test_1, type: test, id: 1, _source_includes: include.field1 }
- - match: { include.field1: v1 }
- - is_false: include.field2
-
- - do:
- get_source: { index: test_1, type: test, id: 1, _source_includes: "include.field1,include.field2" }
- - match: { include.field1: v1 }
- - match: { include.field2: v2 }
- - is_false: count
-
- - do:
- get_source: { index: test_1, type: test, id: 1, _source_includes: include, _source_excludes: "*.field2" }
- - match: { include.field1: v1 }
- - is_false: include.field2
- - is_false: count
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/81_missing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/81_missing_with_types.yml
deleted file mode 100644
index 16eb5ea51e898..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/81_missing_with_types.yml
+++ /dev/null
@@ -1,19 +0,0 @@
----
-"Missing document with catch":
-
- - do:
- catch: missing
- get_source:
- index: test_1
- type: test
- id: 1
-
----
-"Missing document with ignore":
-
- - do:
- get_source:
- index: test_1
- type: test
- id: 1
- ignore: 404
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/86_source_missing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/86_source_missing_with_types.yml
deleted file mode 100644
index d7cfced5164ec..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/86_source_missing_with_types.yml
+++ /dev/null
@@ -1,39 +0,0 @@
----
-setup:
-
- - do:
- indices.create:
- include_type_name: true
- index: test_1
- body:
- mappings:
- test:
- _source: { enabled: false }
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
-
----
-"Missing document source with catch":
-
- - do:
- catch: missing
- get_source:
- index: test_1
- type: test
- id: 1
-
----
-"Missing document source with ignore":
-
- - do:
- get_source:
- index: test_1
- type: test
- id: 1
- ignore: 404
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml
index a129dcab80d9a..97eb9be1547ba 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml
@@ -12,7 +12,6 @@
body: { foo: bar }
- match: { _index: test-weird-index-中文 }
- - match: { _type: _doc }
- match: { _id: "1"}
- match: { _version: 1}
@@ -22,7 +21,6 @@
id: 1
- match: { _index: test-weird-index-中文 }
- - match: { _type: _doc }
- match: { _id: "1"}
- match: { _version: 1}
- match: { _source: { foo: bar }}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/11_with_id_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/11_with_id_with_types.yml
deleted file mode 100644
index daac81849fb5e..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/11_with_id_with_types.yml
+++ /dev/null
@@ -1,34 +0,0 @@
----
-"Index with ID":
-
- - do:
- index:
- index: test-weird-index-中文
- type: weird.type
- id: 1
- body: { foo: bar }
-
- - match: { _index: test-weird-index-中文 }
- - match: { _type: weird.type }
- - match: { _id: "1"}
- - match: { _version: 1}
-
- - do:
- get:
- index: test-weird-index-中文
- type: weird.type
- id: 1
-
- - match: { _index: test-weird-index-中文 }
- - match: { _type: weird.type }
- - match: { _id: "1"}
- - match: { _version: 1}
- - match: { _source: { foo: bar }}
-
- - do:
- catch: bad_request
- index:
- index: idx
- type: type
- id: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
- body: { foo: bar }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml
index f8a50415a95ef..478a731828738 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml
@@ -1,9 +1,5 @@
---
"Index result field":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
index:
index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/13_result_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/13_result_with_types.yml
deleted file mode 100644
index 45ebe0bbd3dc1..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/13_result_with_types.yml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-"Index result field":
-
- - do:
- index:
- index: test_index
- type: test
- id: 1
- body: { foo: bar }
-
- - match: { result: created }
-
- - do:
- index:
- index: test_index
- type: test
- id: 1
- body: { foo: bar }
- op_type: index
-
- - match: { result: updated }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml
index 073a4704b4ef8..54f203e3621bc 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml
@@ -12,7 +12,6 @@
- is_true: _id
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _version: 1 }
- set: { _id: id }
@@ -22,7 +21,6 @@
id: '$id'
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _id: $id }
- match: { _version: 1 }
- match: { _source: { foo: bar }}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/16_without_id_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/16_without_id_with_types.yml
deleted file mode 100644
index 3fff0512b9602..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/16_without_id_with_types.yml
+++ /dev/null
@@ -1,26 +0,0 @@
----
-"Index without ID":
-
- - do:
- index:
- index: test_1
- type: test
- body: { foo: bar }
-
- - is_true: _id
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _version: 1 }
- - set: { _id: id }
-
- - do:
- get:
- index: test_1
- type: test
- id: '$id'
-
- - match: { _index: test_1 }
- - match: { _type: test }
- - match: { _id: $id }
- - match: { _version: 1 }
- - match: { _source: { foo: bar }}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/21_optype_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/21_optype_with_types.yml
deleted file mode 100644
index 60ae26d46d07d..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/21_optype_with_types.yml
+++ /dev/null
@@ -1,29 +0,0 @@
----
-"Optype":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- op_type: create
- body: { foo: bar }
-
- - do:
- catch: conflict
- index:
- index: test_1
- type: test
- id: 1
- op_type: create
- body: { foo: bar }
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- op_type: index
- body: { foo: bar }
-
- - match: { _version: 2 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_cas.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_cas.yml
index 550582e9816eb..27534131782a5 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_cas.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_cas.yml
@@ -1,10 +1,5 @@
---
"Compare And Swap Sequence Numbers":
-
- - skip:
- version: " - 6.99.99"
- reason: typesless api was introduces in 7.0
-
- do:
index:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/37_external_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/37_external_version_with_types.yml
deleted file mode 100644
index f17e6b749319d..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/37_external_version_with_types.yml
+++ /dev/null
@@ -1,55 +0,0 @@
----
-"External version":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- version_type: external
- version: 0
-
- - match: { _version: 0 }
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- version_type: external
- version: 5
-
- - match: { _version: 5 }
-
- - do:
- catch: conflict
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- version_type: external
- version: 5
-
- - do:
- catch: conflict
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- version_type: external
- version: 0
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- version_type: external
- version: 6
-
- - match: { _version: 6}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/38_external_gte_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/38_external_gte_version_with_types.yml
deleted file mode 100644
index dccbe02ea1400..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/38_external_gte_version_with_types.yml
+++ /dev/null
@@ -1,56 +0,0 @@
----
-"External GTE version":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- version_type: external_gte
- version: 0
-
- - match: { _version: 0}
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- version_type: external_gte
- version: 5
-
- - match: { _version: 5}
-
- - do:
- catch: conflict
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
- version_type: external_gte
- version: 0
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar2 }
- version_type: external_gte
- version: 5
-
- - match: { _version: 5}
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar2 }
- version_type: external_gte
- version: 6
-
- - match: { _version: 6}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/41_routing_with_types.yml
deleted file mode 100644
index 5b0cf94f4236b..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/41_routing_with_types.yml
+++ /dev/null
@@ -1,43 +0,0 @@
----
-"Routing":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index:
- number_of_shards: 5
- number_of_routing_shards: 5
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- routing: 5
- body: { foo: bar }
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- routing: 5
- stored_fields: [_routing]
-
- - match: { _id: "1"}
- - match: { _routing: "5"}
-
- - do:
- catch: missing
- get:
- index: test_1
- type: test
- id: 1
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/61_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/61_refresh_with_types.yml
deleted file mode 100644
index be44cafd43020..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/61_refresh_with_types.yml
+++ /dev/null
@@ -1,83 +0,0 @@
----
-"Refresh":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index.refresh_interval: -1
- number_of_replicas: 0
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { term: { _id: 1 }}
-
- - match: { hits.total: 0 }
-
- - do:
- index:
- index: test_1
- type: test
- id: 2
- refresh: true
- body: { foo: bar }
- - is_true: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { term: { _id: 2 }}
-
- - match: { hits.total: 1 }
-
----
-"When refresh url parameter is an empty string that means \"refresh immediately\"":
- - do:
- index:
- index: test_1
- type: test
- id: 1
- refresh: ""
- body: { foo: bar }
- - is_true: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { term: { _id: 1 }}
-
- - match: { hits.total: 1 }
-
----
-"refresh=wait_for waits until changes are visible in search":
- - do:
- index:
- index: index_60_refresh_1
- type: test
- id: index_60_refresh_id1
- body: { foo: bar }
- refresh: wait_for
- - is_false: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: index_60_refresh_1
- body:
- query: { term: { _id: index_60_refresh_id1 }}
- - match: { hits.total: 1 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/70_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/70_mix_typeless_typeful.yml
deleted file mode 100644
index f3629fbb7cc18..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/70_mix_typeless_typeful.yml
+++ /dev/null
@@ -1,102 +0,0 @@
----
-"Index with typeless API on an index that has types":
-
- - skip:
- version: " - 6.99.99"
- reason: Typeless APIs were introduced in 7.0.0
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "keyword"
-
- - do:
- index:
- index: index
- id: 1
- body: { foo: bar }
-
- - match: { _index: "index" }
- - match: { _type: "_doc" }
- - match: { _id: "1"}
- - match: { _version: 1}
-
- - do:
- get: # not using typeless API on purpose
- index: index
- type: not_doc
- id: 1
-
- - match: { _index: "index" }
- - match: { _type: "not_doc" } # the important bit to check
- - match: { _id: "1"}
- - match: { _version: 1}
- - match: { _source: { foo: bar }}
-
-
- - do:
- index:
- index: index
- body: { foo: bar }
-
- - match: { _index: "index" }
- - match: { _type: "_doc" }
- - match: { _version: 1}
- - set: { _id: id }
-
- - do:
- get: # using typeful API on purpose
- index: index
- type: not_doc
- id: '$id'
-
- - match: { _index: "index" }
- - match: { _type: "not_doc" } # the important bit to check
- - match: { _id: $id}
- - match: { _version: 1}
- - match: { _source: { foo: bar }}
-
----
-"Index call that introduces new field mappings":
-
- - skip:
- version: " - 6.99.99"
- reason: Typeless APIs were introduced in 7.0.0
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "keyword"
- - do:
- index:
- index: index
- id: 2
- body: { new_field: value }
-
- - match: { _index: "index" }
- - match: { _type: "_doc" }
- - match: { _id: "2" }
- - match: { _version: 1 }
-
- - do:
- get: # using typeful API on purpose
- index: index
- type: not_doc
- id: 2
-
- - match: { _index: "index" }
- - match: { _type: "not_doc" }
- - match: { _id: "2" }
- - match: { _version: 1}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml
index 87d3b77aee329..37a14d9abb669 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml
@@ -9,9 +9,6 @@ setup:
---
"_analyze with No. generated tokens less than or equal to index.analyze.max_token_count should succeed":
- - skip:
- version: " - 6.99.99"
- reason: index.analyze.max_token_count setting has been added in 7.0.0
- do:
indices.analyze:
index: test_1
@@ -25,9 +22,6 @@ setup:
---
"_analyze with No. generated tokens more than index.analyze.max_token_count should fail":
- - skip:
- version: " - 6.99.99"
- reason: index.analyze.max_token_count setting has been added in 7.0.0
- do:
catch: /The number of tokens produced by calling _analyze has exceeded the allowed maximum of \[3\]. This limit can be set by changing the \[index.analyze.max_token_count\] index level setting\./
indices.analyze:
@@ -39,9 +33,6 @@ setup:
---
"_analyze with explain with No. generated tokens more than index.analyze.max_token_count should fail":
- - skip:
- version: " - 6.99.99"
- reason: index.analyze.max_token_count setting has been added in 7.0.0
- do:
catch: /The number of tokens produced by calling _analyze has exceeded the allowed maximum of \[3\]. This limit can be set by changing the \[index.analyze.max_token_count\] index level setting\./
indices.analyze:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml
index 099226e41e6d3..94b23fb63adb5 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml
@@ -11,10 +11,6 @@
---
"clear_cache with fielddata set to true":
- - skip:
- version: " - 6.2.99"
- reason: fielddata was deprecated before 6.3.0
-
- do:
indices.clear_cache:
fielddata: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml
index 412d29905ffc2..a4d1841ed7108 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml
@@ -66,7 +66,6 @@ setup:
id: "1"
- match: { _index: target }
- - match: { _type: _doc }
- match: { _id: "1" }
- match: { _source: { foo: "hello world" } }
@@ -77,7 +76,6 @@ setup:
id: "2"
- match: { _index: target }
- - match: { _type: _doc }
- match: { _id: "2" }
- match: { _source: { foo: "hello world 2" } }
@@ -88,7 +86,6 @@ setup:
id: "3"
- match: { _index: target }
- - match: { _type: _doc }
- match: { _id: "3" }
- match: { _source: { foo: "hello world 3" } }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml
index c8ede7cd90284..ce8a6604069ed 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml
@@ -1,9 +1,5 @@
---
"Create index with mappings":
-
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0.0
- do:
indices.create:
index: test_index
@@ -19,10 +15,6 @@
---
"Create index with settings":
-
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0.0
- do:
indices.create:
index: test_index
@@ -38,10 +30,6 @@
---
"Create index":
-
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0.0
- do:
indices.create:
index: test_index
@@ -51,10 +39,6 @@
---
"Create index with wait_for_active_shards set to all":
-
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0.0
- do:
indices.create:
index: test_index
@@ -68,10 +52,6 @@
---
"Create index with aliases":
-
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0.0
- do:
indices.create:
index: test_index
@@ -102,9 +82,6 @@
---
"Create index with write aliases":
- - skip:
- version: " - 6.99.99"
- reason: is_write_index is not implemented in ES <= 6.x
- do:
indices.create:
index: test_index
@@ -138,9 +115,6 @@
---
"Create index with explicit _doc type":
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0
- do:
catch: bad_request
indices.create:
@@ -155,19 +129,3 @@
- match: { error.type: "illegal_argument_exception" }
- match: { error.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." }
----
-"Create index without soft deletes":
- - skip:
- version: " - 7.5.99"
- reason: "indices without soft deletes are deprecated in 7.6"
- features: "allowed_warnings"
-
- - do:
- allowed_warnings:
- - Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions.
- Please do not specify value for setting [index.soft_deletes.enabled] of index [test_index].
- indices.create:
- index: test_index
- body:
- settings:
- soft_deletes.enabled: false
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml
deleted file mode 100644
index f5aeb53751119..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml
+++ /dev/null
@@ -1,143 +0,0 @@
----
-"Create index with mappings":
-
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- type_1: {}
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test_index
-
- - is_true: test_index.mappings.type_1
-
----
-"Create index with settings":
-
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- settings:
- number_of_replicas: "0"
-
- - do:
- indices.get_settings:
- index: test_index
-
- - match: { test_index.settings.index.number_of_replicas: "0"}
-
----
-"Create index":
-
- - do:
- indices.create:
- include_type_name: true
- index: test_index
-
- - match: { acknowledged: true }
- - match: { index: "test_index"}
-
----
-"Create index with wait_for_active_shards set to all":
-
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- wait_for_active_shards: all
- body:
- settings:
- number_of_replicas: "0"
-
- - match: { acknowledged: true }
- - match: { shards_acknowledged: true }
-
----
-"Create index with aliases":
-
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- type_1:
- properties:
- field:
- type: text
- aliases:
- test_alias: {}
- test_blias:
- routing: b
- test_clias:
- filter:
- term:
- field : value
-
- - do:
- indices.get_alias:
- index: test_index
-
- - match: {test_index.aliases.test_blias.search_routing: b}
- - match: {test_index.aliases.test_blias.index_routing: b}
- - is_false: test_index.aliases.test_blias.filter
- - match: {test_index.aliases.test_clias.filter.term.field: value}
- - is_false: test_index.aliases.test_clias.index_routing
- - is_false: test_index.aliases.test_clias.search_routing
-
----
-"Create index with write aliases":
- - skip:
- version: " - 6.99.99"
- reason: is_write_index is not implemented in ES <= 6.x
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- aliases:
- test_alias: {}
- test_blias:
- is_write_index: false
- test_clias:
- is_write_index: true
-
- - do:
- indices.get_alias:
- index: test_index
-
- - is_false: test_index.aliases.test_alias.is_write_index
- - is_false: test_index.aliases.test_blias.is_write_index
- - is_true: test_index.aliases.test_clias.is_write_index
-
----
-"Create index with no type mappings":
- - do:
- catch: /illegal_argument_exception/
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- "" : {}
-
----
-"Create index with invalid mappings":
- - do:
- catch: /illegal_argument_exception/
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- test_type:
- properties:
- "":
- type: keyword
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/20_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/20_mix_typeless_typeful.yml
deleted file mode 100644
index 9167574ea9a8e..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/20_mix_typeless_typeful.yml
+++ /dev/null
@@ -1,149 +0,0 @@
----
-"Create a typeless index while there is a typed template":
-
- - skip:
- version: " - 6.6.99"
- reason: Merging typeless/typed mappings/templates was added in 6.7
- features: allowed_warnings
-
- - do:
- indices.put_template:
- include_type_name: true
- name: test_template
- body:
- index_patterns: test-*
- mappings:
- my_type:
- properties:
- foo:
- type: keyword
-
- - do:
- allowed_warnings:
- - "index [test-1] matches multiple legacy templates [global, test_template], composable templates will only match a single template"
- indices.create:
- index: test-1
- body:
- mappings:
- properties:
- bar:
- type: "long"
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test-1
-
- - is_true: test-1.mappings._doc # the index creation call won
- - is_false: test-1.mappings.my_type
- - is_true: test-1.mappings._doc.properties.foo
- - is_true: test-1.mappings._doc.properties.bar
-
----
-"Create a typed index while there is a typeless template":
-
- - skip:
- version: " - 6.6.99"
- reason: Merging typeless/typed mappings/templates was added in 6.7
- features: allowed_warnings
-
- - do:
- indices.put_template:
- include_type_name: false
- name: test_template
- body:
- index_patterns: test-*
- mappings:
- properties:
- foo:
- type: keyword
-
- - do:
- allowed_warnings:
- - "index [test-1] matches multiple legacy templates [global, test_template], composable templates will only match a single template"
- indices.create:
- include_type_name: true
- index: test-1
- body:
- mappings:
- my_type:
- properties:
- bar:
- type: "long"
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test-1
-
- - is_true: test-1.mappings.my_type # the index creation call won
- - is_false: test-1.mappings._doc
- - is_true: test-1.mappings.my_type.properties.foo
- - is_true: test-1.mappings.my_type.properties.bar
-
----
-"Implicitly create a typed index while there is a typeless template":
-
- - skip:
- version: " - 6.99.99"
- reason: include_type_name only supported as of 6.7
-
- - do:
- indices.put_template:
- include_type_name: false
- name: test_template
- body:
- index_patterns: test-*
- mappings:
- properties:
- foo:
- type: keyword
-
- - do:
- catch: /the final mapping would have more than 1 type/
- index:
- index: test-1
- type: my_type
- body: { bar: 42 }
-
----
-"Implicitly create a typeless index while there is a typed template":
-
- - skip:
- version: " - 6.99.99"
- reason: needs typeless index operations to work on typed indices
- features: allowed_warnings
-
- - do:
- indices.put_template:
- include_type_name: true
- name: test_template
- body:
- index_patterns: test-*
- mappings:
- my_type:
- properties:
- foo:
- type: keyword
-
- - do:
- allowed_warnings:
- - "index [test-1] matches multiple legacy templates [global, test_template], composable templates will only match a single template"
- index:
- index: test-1
- body: { bar: 42 }
-
-# ensures dynamic mapping update is visible to get_mapping
- - do:
- cluster.health:
- wait_for_events: normal
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test-1
-
- - is_true: test-1.mappings.my_type # the template is honored
- - is_false: test-1.mappings._doc
- - is_true: test-1.mappings.my_type.properties.foo
- - is_true: test-1.mappings.my_type.properties.bar
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml
index 781d133153605..29b3c1208a7b5 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml
@@ -1,38 +1,5 @@
----
-"Index synced flush rest test":
- - skip:
- version: " - 7.5.99"
- reason: "synced flush is deprecated in 7.6"
- features: "allowed_warnings"
- - do:
- indices.create:
- index: testing
- body:
- settings:
- index:
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
- - do:
- allowed_warnings:
- - Synced flush is deprecated and will be removed in 8.0. Use flush at _/flush or /{index}/_flush instead.
- indices.flush_synced:
- index: testing
-
- - is_false: _shards.failed
-
- - do:
- indices.stats: {level: shards}
-
- - is_true: indices.testing.shards.0.0.commit.user_data.sync_id
-
---
"Flush stats":
- - skip:
- version: " - 6.2.99"
- reason: periodic flush stats is introduced in 6.3.0
- do:
indices.create:
index: test
@@ -50,7 +17,6 @@
- do:
index:
index: test
- type: doc
id: 1
body: { "message": "a long message to make a periodic flush happen after this index operation" }
- do:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml
index 413c4bcb8d28c..85267f49b1317 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml
@@ -44,10 +44,6 @@ setup:
---
"Test include_type_name":
- - skip:
- version: " - 6.6.99"
- reason: the include_type_name parameter is not supported before 6.7
-
- do:
indices.get:
include_type_name: true
@@ -66,10 +62,6 @@ setup:
---
"Test include_type_name dafaults to false":
- - skip:
- version: " - 6.99.99"
- reason: the include_type_name parameter default is different on 6.x and 7.0, so only test this on 7.0 clusters
-
- do:
indices.get:
index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_alias/30_wildcards.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_alias/30_wildcards.yml
index 08b3009be0e88..389166a03136e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_alias/30_wildcards.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_alias/30_wildcards.yml
@@ -26,9 +26,6 @@ setup:
---
"Get aliases wildcard and simple exclusion":
- - skip:
- version: " - 6.99.99"
- reason: Exclusions in the alias expression are not handled
- do:
indices.get_alias:
name: test_blias_2,test_alias*,-test_alias_1
@@ -41,9 +38,6 @@ setup:
---
"Get aliases and wildcard exclusion":
- - skip:
- version: " - 6.99.99"
- reason: Exclusions in the alias expression are not handled
- do:
indices.get_alias:
name: test_alias_1,test_blias_1,-test_alias*
@@ -66,9 +60,6 @@ setup:
---
"Non-existent exclusion alias before wildcard returns 404":
- - skip:
- version: " - 6.99.99"
- reason: Exclusions in the alias expression are not handled
- do:
catch: missing
indices.get_alias:
@@ -97,9 +88,6 @@ setup:
---
"Missing exclusions does not fire 404":
- - skip:
- version: " - 6.99.99"
- reason: Exclusions in the alias expression are not handled
- do:
indices.get_alias:
name: test_alias*,-non-existent,test_blias*,-test
@@ -112,9 +100,6 @@ setup:
---
"Exclusion of non wildcarded aliases":
- - skip:
- version: " - 6.99.99"
- reason: Exclusions in the alias expression are not handled
- do:
indices.get_alias:
name: test_alias_1,test_blias_2,-test_alias*,-test_blias_2
@@ -123,9 +108,6 @@ setup:
---
"Wildcard exclusions does not trigger 404":
- - skip:
- version: " - 6.99.99"
- reason: Exclusions in the alias expression are not handled
- do:
catch: missing
indices.get_alias:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml
index 84f2a0210fcf4..b132aa6bf03de 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml
@@ -1,8 +1,5 @@
---
setup:
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0
- do:
indices.create:
index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml
deleted file mode 100644
index 0a7f5fa3560ba..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml
+++ /dev/null
@@ -1,83 +0,0 @@
----
-setup:
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- test_type:
- properties:
- text:
- type: text
-
----
-"Get field mapping with no index and type":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- fields: text
-
- - match: {test_index.mappings.test_type.text.mapping.text.type: text}
-
----
-"Get field mapping by index only":
- - do:
- indices.get_field_mapping:
- include_type_name: true
- index: test_index
- fields: text
-
- - match: {test_index.mappings.test_type.text.mapping.text.type: text}
-
----
-"Get field mapping by type & field":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- index: test_index
- type: test_type
- fields: text
-
- - match: {test_index.mappings.test_type.text.mapping.text.type: text}
-
----
-"Get field mapping by type & field, with another field that doesn't exist":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- index: test_index
- type: test_type
- fields: [ text , text1 ]
-
- - match: {test_index.mappings.test_type.text.mapping.text.type: text}
- - is_false: test_index.mappings.test_type.text1
-
----
-"Get field mapping with include_defaults":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- index: test_index
- type: test_type
- fields: text
- include_defaults: true
-
- - match: {test_index.mappings.test_type.text.mapping.text.type: text}
- - match: {test_index.mappings.test_type.text.mapping.text.analyzer: default}
-
----
-"Get field mapping should work without index specifying type and fields":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- type: test_type
- fields: text
-
- - match: {test_index.mappings.test_type.text.mapping.text.type: text}
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml
index 1570ded351874..be6b1c3bb6d49 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml
@@ -1,8 +1,5 @@
---
"Return empty object if field doesn't exist, but type and index do":
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0
- do:
indices.create:
index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml
deleted file mode 100644
index 264d187ebd22d..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml
+++ /dev/null
@@ -1,23 +0,0 @@
----
-"Return empty object if field doesn't exist, but type and index do":
-
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- index: test_index
- type: test_type
- fields: not_existent
-
- - match: { '': {}}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yml
deleted file mode 100644
index 0bf3f1f7823ee..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yml
+++ /dev/null
@@ -1,22 +0,0 @@
----
-"Raise 404 when type doesn't exist":
-
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
- - do:
- catch: missing
- indices.get_field_mapping:
- include_type_name: true
- index: test_index
- type: not_test_type
- fields: text
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml
index 7db61d122e7ce..2c9ff58b445df 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml
@@ -1,8 +1,5 @@
---
setup:
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0
- do:
indices.create:
index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml
deleted file mode 100644
index 68c183e9b292e..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml
+++ /dev/null
@@ -1,144 +0,0 @@
----
-setup:
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- test_type:
- properties:
- t1:
- type: text
- t2:
- type: text
- obj:
- properties:
- t1:
- type: text
- i_t1:
- type: text
- i_t3:
- type: text
-
- - do:
- indices.create:
- include_type_name: true
- index: test_index_2
- body:
- mappings:
- test_type_2:
- properties:
- t1:
- type: text
- t2:
- type: text
- obj:
- properties:
- t1:
- type: text
- i_t1:
- type: text
- i_t3:
- type: text
-
----
-"Get field mapping with * for fields":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- fields: "*"
-
- - match: {test_index.mappings.test_type.t1.full_name: t1 }
- - match: {test_index.mappings.test_type.t2.full_name: t2 }
- - match: {test_index.mappings.test_type.obj\.t1.full_name: obj.t1 }
- - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 }
- - match: {test_index.mappings.test_type.obj\.i_t3.full_name: obj.i_t3 }
-
----
-"Get field mapping with t* for fields":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- index: test_index
- fields: "t*"
-
- - match: {test_index.mappings.test_type.t1.full_name: t1 }
- - match: {test_index.mappings.test_type.t2.full_name: t2 }
- - length: {test_index.mappings.test_type: 2}
-
----
-"Get field mapping with *t1 for fields":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- index: test_index
- fields: "*t1"
- - match: {test_index.mappings.test_type.t1.full_name: t1 }
- - match: {test_index.mappings.test_type.obj\.t1.full_name: obj.t1 }
- - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 }
- - length: {test_index.mappings.test_type: 3}
-
----
-"Get field mapping with wildcarded relative names":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- index: test_index
- fields: "obj.i_*"
- - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 }
- - match: {test_index.mappings.test_type.obj\.i_t3.full_name: obj.i_t3 }
- - length: {test_index.mappings.test_type: 2}
-
----
-"Get field mapping should work using '_all' for indices and types":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- index: _all
- type: _all
- fields: "t*"
- - match: {test_index.mappings.test_type.t1.full_name: t1 }
- - match: {test_index.mappings.test_type.t2.full_name: t2 }
- - length: {test_index.mappings.test_type: 2}
- - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 }
- - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 }
- - length: {test_index_2.mappings.test_type_2: 2}
-
----
-"Get field mapping should work using '*' for indices and types":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- index: '*'
- type: '*'
- fields: "t*"
- - match: {test_index.mappings.test_type.t1.full_name: t1 }
- - match: {test_index.mappings.test_type.t2.full_name: t2 }
- - length: {test_index.mappings.test_type: 2}
- - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 }
- - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 }
- - length: {test_index_2.mappings.test_type_2: 2}
-
----
-"Get field mapping should work using comma_separated values for indices and types":
-
- - do:
- indices.get_field_mapping:
- include_type_name: true
- index: 'test_index,test_index_2'
- type: 'test_type,test_type_2'
- fields: "t*"
- - match: {test_index.mappings.test_type.t1.full_name: t1 }
- - match: {test_index.mappings.test_type.t2.full_name: t2 }
- - length: {test_index.mappings.test_type: 2}
- - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 }
- - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 }
- - length: {test_index_2.mappings.test_type_2: 2}
-
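
The typeless counterpart of these wildcard checks lives in 50_field_wildcards.yml, modified above. As a hedged sketch only: assuming the typeless response simply drops the type level from the match paths (as the typeless field-mapping tests retained by this change do), one of the wildcard assertions would look roughly like this.

---
"Get field mapping with t* for fields (typeless sketch)":

  - do:
      indices.get_field_mapping:
        index: test_index
        fields: "t*"

  - match: {test_index.mappings.t1.full_name: t1 }
  - match: {test_index.mappings.t2.full_name: t2 }
  - length: {test_index.mappings: 2}
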
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml
deleted file mode 100644
index 2b6433a3e98f8..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-"GET mapping with typeless API on an index that has types":
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "keyword"
-
- - do:
- indices.get_field_mapping:
- include_type_name: false
- index: index
- fields: foo
-
- - match: { index.mappings.foo.mapping.foo.type: "keyword" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml
index c3addd95469d4..e46f67326a8d2 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml
@@ -1,8 +1,5 @@
---
setup:
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0
- do:
indices.create:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml
deleted file mode 100644
index 598cc24f7806b..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml
+++ /dev/null
@@ -1,158 +0,0 @@
----
-setup:
- - do:
- indices.create:
- include_type_name: true
- index: test_1
- body:
- mappings:
- doc: {}
- - do:
- indices.create:
- include_type_name: true
- index: test_2
- body:
- mappings:
- doc: {}
----
-"Get /{index}/_mapping with empty mappings":
-
- - do:
- indices.create:
- index: t
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: t
-
- - match: { t.mappings: {}}
-
----
-"Get /_mapping":
-
- - do:
- indices.get_mapping:
- include_type_name: true
-
- - is_true: test_1.mappings.doc
- - is_true: test_2.mappings.doc
-
----
-"Get /{index}/_mapping":
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test_1
-
- - is_true: test_1.mappings.doc
- - is_false: test_2
-
-
----
-"Get /{index}/_mapping/_all":
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test_1
- type: _all
-
- - is_true: test_1.mappings.doc
- - is_false: test_2
-
----
-"Get /{index}/_mapping/*":
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test_1
- type: '*'
-
- - is_true: test_1.mappings.doc
- - is_false: test_2
-
----
-"Get /{index}/_mapping/{type}":
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test_1
- type: doc
-
- - is_true: test_1.mappings.doc
- - is_false: test_2
-
----
-"Get /{index}/_mapping/{type*}":
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test_1
- type: 'd*'
-
- - is_true: test_1.mappings.doc
- - is_false: test_2
-
----
-"Get /_mapping/{type}":
-
- - do:
- indices.get_mapping:
- include_type_name: true
- type: doc
-
- - is_true: test_1.mappings.doc
- - is_true: test_2.mappings.doc
-
----
-"Get /_all/_mapping/{type}":
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: _all
- type: doc
-
- - is_true: test_1.mappings.doc
- - is_true: test_2.mappings.doc
-
----
-"Get /*/_mapping/{type}":
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: '*'
- type: doc
-
- - is_true: test_1.mappings.doc
- - is_true: test_2.mappings.doc
-
----
-"Get /index,index/_mapping/{type}":
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test_1,test_2
- type: doc
-
- - is_true: test_1.mappings.doc
- - is_true: test_2.mappings.doc
-
----
-"Get /index*/_mapping/{type}":
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: '*2'
- type: doc
-
- - is_true: test_2.mappings.doc
- - is_false: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/20_missing_type.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/20_missing_type.yml
deleted file mode 100644
index f17fb6a595305..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/20_missing_type.yml
+++ /dev/null
@@ -1,106 +0,0 @@
----
-"Non-existent type returns 404":
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
- - do:
- catch: missing
- indices.get_mapping:
- include_type_name: true
- index: test_index
- type: not_test_type
-
- - match: { status: 404 }
- - match: { error.reason: 'type[[not_test_type]] missing' }
-
----
-"No type matching pattern returns 404":
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
- - do:
- catch: missing
- indices.get_mapping:
- include_type_name: true
- index: test_index
- type: test*,not*
-
- - match: { status: 404 }
- - match: { error: 'type [not*] missing' }
- - is_true: test_index.mappings.test_type
-
----
-"Existent and non-existent type returns 404 and the existing type":
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
- - do:
- catch: missing
- indices.get_mapping:
- include_type_name: true
- index: test_index
- type: test_type,not_test_type
-
- - match: { status: 404 }
- - match: { error: 'type [not_test_type] missing' }
- - is_true: test_index.mappings.test_type
-
----
-"Existent and non-existent types returns 404 and the existing type":
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- mappings:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
- - do:
- catch: missing
- indices.get_mapping:
- include_type_name: true
- index: test_index
- type: test_type,not_test_type,another_not_test_type
-
- - match: { status: 404 }
- - match: { error: 'types [another_not_test_type,not_test_type] missing' }
- - is_true: test_index.mappings.test_type
-
----
-"Type missing when no types exist":
- - do:
- catch: missing
- indices.get_mapping:
- include_type_name: true
- type: not_test_type
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml
index 5a7624265ecc9..1bbfbc4f4c967 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml
@@ -4,7 +4,7 @@
catch: missing
indices.get_mapping:
index: test_index
-
+
---
"Index missing, no indexes":
- do:
@@ -14,9 +14,6 @@
---
"Index missing, ignore_unavailable=true":
- - skip:
- version: " - 6.99.99"
- reason: ignore_unavailable was ignored in previous versions
- do:
indices.get_mapping:
index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml
index 15a52b7b2db25..956b80ce16b52 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml
@@ -18,7 +18,6 @@
- do:
indices.get_mapping:
- include_type_name: false
index: test_alias
- match: {test_index.mappings.properties.text.type: text}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml
index d3f15b3292285..7f6f3999c868d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml
@@ -102,9 +102,6 @@ setup:
---
"Get test-* with wildcard_expansion=none":
- - skip:
- version: " - 6.99.99"
- reason: allow_no_indices (defaults to true) was ignored in previous versions
- do:
indices.get_mapping:
index: test-x*
@@ -113,9 +110,6 @@ setup:
- match: { '': {} }
---
"Get test-* with wildcard_expansion=none allow_no_indices=false":
- - skip:
- version: " - 6.99.99"
- reason: allow_no_indices was ignored in previous versions
- do:
catch: missing
indices.get_mapping:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/61_empty_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/61_empty_with_types.yml
deleted file mode 100644
index 6da7f4a2c6946..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/61_empty_with_types.yml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-setup:
-
- - do:
- indices.create:
- index: test_1
-
- - do:
- indices.create:
- index: test_2
-
----
-"Check empty mapping when getting all mappings via /_mapping":
-
- - do:
- indices.get_mapping:
- include_type_name: true
-
- - match: { test_1.mappings: {}}
- - match: { test_2.mappings: {}}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml
deleted file mode 100644
index 162a8d340d48a..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml
+++ /dev/null
@@ -1,23 +0,0 @@
----
-"GET mapping with typeless API on an index that has types":
-
- - skip:
- version: " - 6.99.99"
- reason: include_type_name was introduced in 7.0.0
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "keyword"
-
- - do:
- indices.get_mapping:
- index: index
-
- - match: { index.mappings.properties.foo.type: "keyword" }
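
With the mixed typeless/typeful case removed, the remaining get_mapping coverage is purely typeless. A minimal sketch of that shape, reusing the index name and field from the deleted test and the type-free response path seen in the retained 40_aliases.yml (a sketch, not part of the patch):

  - do:
      indices.create:
        index: index
        body:
          mappings:
            properties:
              foo:
                type: "keyword"

  - do:
      indices.get_mapping:
        index: index

  - match: { index.mappings.properties.foo.type: "keyword" }
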
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/30_defaults.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/30_defaults.yml
index 2e3f4af03ebef..83e77140facbc 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/30_defaults.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/30_defaults.yml
@@ -10,9 +10,6 @@ setup:
index: test-index
---
Test retrieval of default settings:
- - skip:
- version: " - 6.3.99"
- reason: include_defaults will not work in mixed-mode clusters containing nodes pre-6.4
- do:
indices.get_settings:
flat_settings: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml
index c1aac94bf1d84..9becbd54a3773 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml
@@ -1,7 +1,4 @@
setup:
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0.0
- do:
indices.put_template:
name: test
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml
index eb9f834ef4979..35e4c29f27d3e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml
@@ -38,10 +38,6 @@
---
"Open index with wait_for_active_shards set to all":
- - skip:
- version: " - 6.0.99"
- reason: wait_for_active_shards parameter was added in 6.1.0
-
- do:
indices.create:
index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml
index ff68b04f20609..77338a6ddae0b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml
@@ -59,11 +59,6 @@
---
"Can set is_write_index":
-
- - skip:
- version: " - 6.3.99"
- reason: "is_write_index is only available from 6.4.0 on"
-
- do:
indices.create:
index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml
index 338eaba8881c3..36317c7ae173c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml
@@ -1,8 +1,5 @@
---
"Test Create and update mapping":
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0
- do:
indices.create:
index: test_index
@@ -53,10 +50,6 @@
---
"Create index with invalid mappings":
-
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0
- do:
indices.create:
index: test_index
@@ -72,14 +65,38 @@
---
"Put mappings with explicit _doc type":
- skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0
+ version: " - 1.99.99"
+ reason: "deprecation message changed in 2.0"
+ - do:
+ indices.create:
+ index: test_index
+
+ - do:
+ catch: bad_request
+ indices.put_mapping:
+ index: test_index
+ body:
+ _doc:
+ properties:
+ field:
+ type: keyword
+
+ - match: { error.type: "illegal_argument_exception" }
+ - match: { error.reason: "Types cannot be provided in put mapping requests" }
+---
+"Put mappings with explicit _doc type bwc":
+ - skip:
+ version: "2.0.0 - "
+ reason: "old deprecation message for pre 2.0"
+ features: "node_selector"
- do:
indices.create:
index: test_index
- do:
+ node_selector:
+ version: " - 1.99.99"
catch: bad_request
indices.put_mapping:
index: test_index
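
The bwc test added in the hunk above combines two runner features: `skip.version` keeps the whole test off 2.0+ clusters, while `node_selector.version` pins the request to a pre-2.0 node in a mixed cluster. An annotated sketch of that pattern, with the values taken from the hunk (the `indices.create` step is omitted here for brevity):

  - skip:
      version: "2.0.0 - "            # do not run this test against 2.0+ clusters
      reason: "old deprecation message for pre 2.0"
      features: "node_selector"      # runner must support per-request node selection
  - do:
      node_selector:
        version: " - 1.99.99"        # in a mixed cluster, send the request to an old node
      catch: bad_request
      indices.put_mapping:
        index: test_index
        body:
          _doc:
            properties:
              field:
                type: keyword
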
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml
deleted file mode 100644
index 5da9cd4bf707c..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml
+++ /dev/null
@@ -1,74 +0,0 @@
----
-"Test Create and update mapping":
- - do:
- indices.create:
- index: test_index
-
- - do:
- indices.put_mapping:
- include_type_name: true
- index: test_index
- type: test_type
- body:
- test_type:
- properties:
- text1:
- type: text
- analyzer: whitespace
- text2:
- type: text
- analyzer: whitespace
- subfield.text3:
- type: text
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test_index
-
- - match: {test_index.mappings.test_type.properties.text1.type: text}
- - match: {test_index.mappings.test_type.properties.text1.analyzer: whitespace}
- - match: {test_index.mappings.test_type.properties.text2.type: text}
- - match: {test_index.mappings.test_type.properties.text2.analyzer: whitespace}
-
- - do:
- indices.put_mapping:
- include_type_name: true
- index: test_index
- type: test_type
- body:
- test_type:
- properties:
- text1:
- type: text
- analyzer: whitespace
- fields:
- text_raw:
- type: keyword
-
-
- - do:
- indices.get_mapping:
- include_type_name: true
- index: test_index
-
- - match: {test_index.mappings.test_type.properties.text1.type: text}
- - match: {test_index.mappings.test_type.properties.subfield.properties.text3.type: text}
- - match: {test_index.mappings.test_type.properties.text1.fields.text_raw.type: keyword}
-
----
-"Create index with invalid mappings":
- - do:
- indices.create:
- index: test_index
- - do:
- catch: /illegal_argument_exception/
- indices.put_mapping:
- include_type_name: true
- index: test_index
- type: test_type
- body:
- test_type:
- properties:
- "":
- type: keyword
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_mix_typeless_typeful.yml
deleted file mode 100644
index 13cb3321841cf..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_mix_typeless_typeful.yml
+++ /dev/null
@@ -1,83 +0,0 @@
----
-"PUT mapping with typeless API on an index that has types":
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "keyword"
-
- - do:
- indices.put_mapping:
- include_type_name: false
- index: index
- body:
- properties:
- bar:
- type: "long"
-
- - do:
- indices.get_mapping:
- include_type_name: false
- index: index
-
- - match: { index.mappings.properties.foo.type: "keyword" }
- - match: { index.mappings.properties.bar.type: "long" }
-
- - do:
- indices.put_mapping:
- include_type_name: false
- index: index
- body:
- properties:
- foo:
- type: "keyword" # also test no-op updates that trigger special logic wrt the mapping version
-
- - do:
- catch: /the final mapping would have more than 1 type/
- indices.put_mapping:
- include_type_name: true
- index: index
- type: some_other_type
- body:
- some_other_type:
- properties:
- bar:
- type: "long"
-
-
----
-"PUT mapping with _doc on an index that has types":
-
- - skip:
- version: " - 6.6.99"
- reason: include_type_name is only supported as of 6.7
-
-
- - do:
- indices.create:
- include_type_name: true
- index: index
- body:
- mappings:
- my_type:
- properties:
- foo:
- type: "keyword"
-
- - do:
- catch: /the final mapping would have more than 1 type/
- indices.put_mapping:
- include_type_name: true
- index: index
- type: _doc
- body:
- _doc:
- properties:
- bar:
- type: "long"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml
index 182ec017e0d30..c1daa76fe3d6e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml
@@ -1,7 +1,4 @@
setup:
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0
- do:
indices.create:
index: test_index1
@@ -162,4 +159,4 @@ setup:
indices.get_mapping: {}
- match: {test_index1.mappings.properties.text.type: text}
-
+
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml
deleted file mode 100644
index 6f9b6f7d9ceef..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml
+++ /dev/null
@@ -1,227 +0,0 @@
-setup:
- - do:
- indices.create:
- index: test_index1
- - do:
- indices.create:
- index: test_index2
- - do:
- indices.create:
- index: foo
-
-
----
-"put one mapping per index":
- - do:
- indices.put_mapping:
- include_type_name: true
- index: test_index1
- type: test_type
- body:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
- - do:
- indices.put_mapping:
- include_type_name: true
- index: test_index2
- type: test_type
- body:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
-
- - do:
- indices.get_mapping:
- include_type_name: true
-
- - match: {test_index1.mappings.test_type.properties.text.type: text}
- - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: {test_index2.mappings.test_type.properties.text.type: text}
- - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: { foo.mappings: {} }
-
----
-"put mapping in _all index":
-
- - do:
- indices.put_mapping:
- include_type_name: true
- index: _all
- type: test_type
- body:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
- - do:
- indices.get_mapping:
- include_type_name: true
-
- - match: {test_index1.mappings.test_type.properties.text.type: text}
- - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: {test_index2.mappings.test_type.properties.text.type: text}
- - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: {foo.mappings.test_type.properties.text.type: text}
- - match: {foo.mappings.test_type.properties.text.analyzer: whitespace}
-
----
-"put mapping in * index":
- - do:
- indices.put_mapping:
- include_type_name: true
- index: "*"
- type: test_type
- body:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
- - do:
- indices.get_mapping:
- include_type_name: true
-
- - match: {test_index1.mappings.test_type.properties.text.type: text}
- - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: {test_index2.mappings.test_type.properties.text.type: text}
- - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: {foo.mappings.test_type.properties.text.type: text}
- - match: {foo.mappings.test_type.properties.text.analyzer: whitespace}
-
----
-"put mapping in prefix* index":
- - do:
- indices.put_mapping:
- include_type_name: true
- index: "test_index*"
- type: test_type
- body:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
- - do:
- indices.get_mapping:
- include_type_name: true
-
- - match: {test_index1.mappings.test_type.properties.text.type: text}
- - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: {test_index2.mappings.test_type.properties.text.type: text}
- - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: { foo.mappings: {} }
-
----
-"put mapping in list of indices":
- - do:
- indices.put_mapping:
- include_type_name: true
- index: [test_index1, test_index2]
- type: test_type
- body:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
- - do:
- indices.get_mapping:
- include_type_name: true
-
- - match: {test_index1.mappings.test_type.properties.text.type: text}
- - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: {test_index2.mappings.test_type.properties.text.type: text}
- - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: { foo.mappings: {} }
-
----
-"put mapping with blank index":
- - do:
- indices.put_mapping:
- include_type_name: true
- type: test_type
- body:
- test_type:
- properties:
- text:
- type: text
- analyzer: whitespace
-
- - do:
- indices.get_mapping:
- include_type_name: true
-
- - match: {test_index1.mappings.test_type.properties.text.type: text}
- - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: {test_index2.mappings.test_type.properties.text.type: text}
- - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}
-
- - match: {foo.mappings.test_type.properties.text.type: text}
- - match: {foo.mappings.test_type.properties.text.analyzer: whitespace}
-
----
-"put mapping with missing type":
-
-
- - do:
- catch: param
- indices.put_mapping:
- include_type_name: true
-
----
-"post a mapping with default analyzer twice":
-
- - do:
- indices.put_mapping:
- include_type_name: true
- index: test_index1
- type: test_type
- body:
- test_type:
- dynamic: false
- properties:
- text:
- analyzer: default
- type: text
-
- - do:
- indices.put_mapping:
- include_type_name: true
- index: test_index1
- type: test_type
- body:
- test_type:
- dynamic: false
- properties:
- text:
- analyzer: default
- type: text
-
- - do:
- indices.get_mapping:
- include_type_name: true
-
- - match: {test_index1.mappings.test_type.properties.text.type: text}
-
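
The typeless replacement for these path-option tests (all_path_options.yml, modified above) keeps the same structure minus the `type` parameter and the type level in both the request body and the match paths. A minimal sketch for the wildcard-index case, based on the typeless body shape and match paths visible elsewhere in this patch:

---
"put one mapping for all indices (typeless sketch)":
  - do:
      indices.put_mapping:
        index: "*"
        body:
          properties:
            text:
              type: text
              analyzer: whitespace

  - do:
      indices.get_mapping: {}

  - match: {test_index1.mappings.properties.text.type: text}
  - match: {test_index1.mappings.properties.text.analyzer: whitespace}
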
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml
index f7a83442ca2e2..0b4e34d2740b5 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml
@@ -1,9 +1,5 @@
---
"Put template":
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0.0
-
- do:
indices.put_template:
name: test
@@ -28,10 +24,6 @@
---
"Put multiple template":
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0.0
-
- do:
indices.put_template:
name: test
@@ -56,10 +48,6 @@
---
"Put template with empty mappings":
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0.0
-
- do:
indices.put_template:
name: test
@@ -241,10 +229,6 @@
---
"Put template with explicit _doc type":
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0
-
- do:
catch: bad_request
indices.put_template:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yml
index 342adced0640d..dc68ffc9a3b86 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yml
@@ -16,20 +16,17 @@
- do:
index:
index: logs-1
- type: test
id: "1"
body: { "foo": "hello world" }
# make this doc visible in index stats
refresh: true
-
+
- do:
get:
index: logs_search
- type: test
id: "1"
- match: { _index: logs-1 }
- - match: { _type: test }
- match: { _id: "1" }
- match: { _source: { foo: "hello world" } }
@@ -59,7 +56,6 @@
- do:
index:
index: logs-000002
- type: test
id: "2"
body: { "foo": "hello world" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml
index ec9fabe02595d..f5d223259dc06 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml
@@ -17,7 +17,6 @@
- do:
index:
index: logs-1
- type: test
id: "1"
body: { "foo": "hello world" }
refresh: true
@@ -38,7 +37,6 @@
- do:
index:
index: logs-1
- type: test
id: "2"
body: { "foo": "hello world" }
refresh: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/30_max_size_condition.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/30_max_size_condition.yml
index 6e4df0f292915..95c0ff509f304 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/30_max_size_condition.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/30_max_size_condition.yml
@@ -1,10 +1,5 @@
---
"Rollover with max_size condition":
-
- - skip:
- version: " - 6.0.99"
- reason: max_size condition is introduced in 6.1.0
-
# create index with alias and replica
- do:
indices.create:
@@ -18,7 +13,6 @@
- do:
index:
index: logs-1
- type: doc
id: "1"
body: { "foo": "hello world" }
refresh: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml
index 47b004326a457..040ffd534c0ab 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml
@@ -1,9 +1,5 @@
---
"Typeless mapping":
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0.0
-
- do:
indices.create:
index: logs-1
@@ -44,10 +40,6 @@
---
"Mappings with explicit _doc type":
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0
-
- do:
indices.create:
index: logs-1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/41_mapping_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/41_mapping_with_types.yml
deleted file mode 100644
index 36389f3ce8bba..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/41_mapping_with_types.yml
+++ /dev/null
@@ -1,47 +0,0 @@
----
-"Typeless mapping":
- - skip:
- version: " - 6.99.99"
- reason: include_type_name defaults to true before 7.0.0
-
- - do:
- indices.create:
- index: logs-1
- body:
- aliases:
- logs_search: {}
-
- # index first document and wait for refresh
- - do:
- index:
- index: logs-1
- type: test
- id: "1"
- body: { "foo": "hello world" }
- refresh: true
-
- # index second document and wait for refresh
- - do:
- index:
- index: logs-1
- type: test
- id: "2"
- body: { "foo": "hello world" }
- refresh: true
-
- # perform alias rollover with new typeless mapping
- - do:
- indices.rollover:
- include_type_name: true
- alias: "logs_search"
- body:
- conditions:
- max_docs: 2
- mappings:
- _doc:
- properties:
- foo2:
- type: keyword
-
- - match: { conditions: { "[max_docs: 2]": true } }
- - match: { rolled_over: true }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.segments/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.segments/10_basic.yml
index 37602774474a1..bda7788354b47 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.segments/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.segments/10_basic.yml
@@ -25,7 +25,6 @@
- do:
index:
index: index1
- type: type
body: { foo: bar }
refresh: true
@@ -53,7 +52,6 @@
- do:
index:
index: index1
- type: type
body: { foo: bar }
refresh: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shard_stores/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shard_stores/10_basic.yml
index b6f1c7e552790..1f621c2e50b9d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shard_stores/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shard_stores/10_basic.yml
@@ -24,7 +24,6 @@
- do:
index:
index: index1
- type: type
body: { foo: bar }
refresh: true
@@ -59,13 +58,11 @@
- do:
index:
index: index1
- type: type
body: { foo: bar }
refresh: true
- do:
index:
index: index2
- type: type
body: { foo: bar }
refresh: true
- do:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml
index 41c851b71cc6c..a5b1cb8607b3a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml
@@ -1,10 +1,5 @@
---
"Shrink index via API":
- - skip:
- version: " - 6.9.99"
- reason: expects warnings that pre-7.0.0 will not send
- features: [warnings, arbitrary_key]
-
# creates an index with one document solely allocated on a particular data node
# and shrinks it into a new index with a single shard
# we don't do the relocation to a single node after the index is created
@@ -40,7 +35,6 @@
id: "1"
- match: { _index: source }
- - match: { _type: _doc }
- match: { _id: "1" }
- match: { _source: { foo: "hello world" } }
@@ -78,6 +72,5 @@
id: "1"
- match: { _index: target }
- - match: { _type: _doc }
- match: { _id: "1" }
- match: { _source: { foo: "hello world" } }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml
index 8fe8643d049ea..a744895c4ce38 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml
@@ -1,10 +1,7 @@
---
"Copy settings during shrink index":
- skip:
- version: " - 6.9.99"
- reason: expects warnings that pre-7.0.0 will not send
- features: [allowed_warnings, arbitrary_key]
-
+ features: allowed_warnings
- do:
nodes.info:
node_id: data:true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml
index 2baa82ea78842..4ae1d0002a237 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml
@@ -29,11 +29,6 @@ setup:
---
"Split index via API":
- - skip:
- version: " - 6.9.99"
- reason: pre-7.0.0 will send warnings
- features: "warnings"
-
# make it read-only
- do:
indices.put_settings:
@@ -69,7 +64,6 @@ setup:
id: "1"
- match: { _index: target }
- - match: { _type: _doc }
- match: { _id: "1" }
- match: { _source: { foo: "hello world" } }
@@ -80,7 +74,6 @@ setup:
id: "2"
- match: { _index: target }
- - match: { _type: _doc }
- match: { _id: "2" }
- match: { _source: { foo: "hello world 2" } }
@@ -91,16 +84,12 @@ setup:
id: "3"
- match: { _index: target }
- - match: { _type: _doc }
- match: { _id: "3" }
- match: { _source: { foo: "hello world 3" } }
---
"Split from 1 to N":
- - skip:
- version: " - 6.99.99"
- reason: automatic preparation for splitting was added in 7.0.0
- do:
indices.create:
index: source_one_shard
@@ -162,7 +151,6 @@ setup:
id: "1"
- match: { _index: target }
- - match: { _type: _doc }
- match: { _id: "1" }
- match: { _source: { foo: "hello world" } }
@@ -173,7 +161,6 @@ setup:
id: "2"
- match: { _index: target }
- - match: { _type: _doc }
- match: { _id: "2" }
- match: { _source: { foo: "hello world 2" } }
@@ -184,17 +171,11 @@ setup:
id: "3"
- match: { _index: target }
- - match: { _type: _doc }
- match: { _id: "3" }
- match: { _source: { foo: "hello world 3" } }
---
"Create illegal split indices":
- - skip:
- version: " - 6.9.99"
- reason: pre-7.0.0 will send warnings
- features: "warnings"
-
# try to do an illegal split with number_of_routing_shards set
- do:
catch: /illegal_argument_exception/
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml
index 3740167a0253a..c86e49aac0561 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml
@@ -1,10 +1,5 @@
---
"Split index ignores target template mapping":
- - skip:
- version: " - 6.9.99"
- reason: pre-7.0.0 will send warnings
- features: "warnings"
-
# create index
- do:
indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml
index 46517f6055f32..0ceacf1f064ca 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml
@@ -1,10 +1,7 @@
---
"Copy settings during split index":
- skip:
- version: " - 6.9.99"
- reason: expects warnings that pre-7.0.0 will not send
- features: [arbitrary_key, allowed_warnings]
-
+ features: allowed_warnings
- do:
nodes.info:
node_id: data:true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml
index 1a650ee88eae6..54b4163ee6502 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml
@@ -39,10 +39,6 @@ setup:
---
"Index - all":
- - skip:
- version: " - 6.3.99"
- reason: "uuid is only available from 6.4.0 on"
-
- do:
indices.stats: { index: _all }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/15_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/15_types.yml
deleted file mode 100644
index e2f31c3405707..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/15_types.yml
+++ /dev/null
@@ -1,81 +0,0 @@
----
-setup:
-
- - do:
- index:
- index: test1
- type: bar
- id: 1
- body: { "bar": "bar", "baz": "baz" }
-
- - do:
- index:
- index: test2
- type: baz
- id: 1
- body: { "bar": "bar", "baz": "baz" }
-
-
----
-"Types - blank":
- - do:
- indices.stats: {}
-
- - match: { _all.primaries.indexing.index_total: 2 }
- - is_false: _all.primaries.indexing.types
-
----
-"Types - one":
- - do:
- indices.stats: { types: bar }
-
- - match: { _all.primaries.indexing.types.bar.index_total: 1 }
- - is_false: _all.primaries.indexing.types.baz
-
----
-"Types - multi":
- - do:
- indices.stats: { types: "bar,baz" }
-
- - match: { _all.primaries.indexing.types.bar.index_total: 1 }
- - match: { _all.primaries.indexing.types.baz.index_total: 1 }
-
----
-"Types - star":
- - do:
- indices.stats: { types: "*" }
-
- - match: { _all.primaries.indexing.types.bar.index_total: 1 }
- - match: { _all.primaries.indexing.types.baz.index_total: 1 }
-
----
-"Types - pattern":
- - do:
- indices.stats: { types: "*r" }
-
- - match: { _all.primaries.indexing.types.bar.index_total: 1 }
- - is_false: _all.primaries.indexing.types.baz
-
----
-"Types - _all metric":
- - do:
- indices.stats: { types: bar, metric: _all }
-
- - match: { _all.primaries.indexing.types.bar.index_total: 1 }
- - is_false: _all.primaries.indexing.types.baz
-
----
-"Types - indexing metric":
- - do:
- indices.stats: { types: bar, metric: indexing }
-
- - match: { _all.primaries.indexing.types.bar.index_total: 1 }
- - is_false: _all.primaries.indexing.types.baz
-
----
-"Types - multi metric":
- - do:
- indices.stats: { types: bar, metric: [ indexing, search ] }
-
- - match: { _all.primaries.indexing.types.bar.index_total: 1 }
- - is_false: _all.primaries.indexing.types.baz
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml
index a308f798df692..e817bc0d27337 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml
@@ -1,83 +1,5 @@
---
-"Translog retention without soft_deletes":
- - skip:
- version: " - 7.5.99"
- reason: "indices without soft deletes are deprecated in 7.6"
- features: "allowed_warnings"
-
- - do:
- indices.create:
- index: test
- body:
- settings:
- soft_deletes.enabled: false
- allowed_warnings:
- - Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions.
- Please do not specify value for setting [index.soft_deletes.enabled] of index [test].
- - do:
- cluster.health:
- wait_for_no_initializing_shards: true
- wait_for_events: languid
- - do:
- indices.stats:
- metric: [ translog ]
- - set: { indices.test.primaries.translog.size_in_bytes: creation_size }
-
- - do:
- index:
- index: test
- id: 1
- body: { "foo": "bar" }
-
- - do:
- indices.stats:
- metric: [ translog ]
- - gt: { indices.test.primaries.translog.size_in_bytes: $creation_size }
- - match: { indices.test.primaries.translog.operations: 1 }
-# we can't check this yet as creation size will contain two empty translog generations. A single
-# non empty generation with one op may be smaller or larger than that.
-# - gt: { indices.test.primaries.translog.uncommitted_size_in_bytes: $creation_size }
- - match: { indices.test.primaries.translog.uncommitted_operations: 1 }
-
- - do:
- indices.flush:
- index: test
-
- - do:
- indices.stats:
- metric: [ translog ]
- - gt: { indices.test.primaries.translog.size_in_bytes: $creation_size }
- - match: { indices.test.primaries.translog.operations: 1 }
- ## creation translog size has some overhead due to an initial empty generation that will be trimmed later
- - lt: { indices.test.primaries.translog.uncommitted_size_in_bytes: $creation_size }
- - match: { indices.test.primaries.translog.uncommitted_operations: 0 }
-
- - do:
- indices.put_settings:
- index: test
- body:
- index.translog.retention.size: -1
- index.translog.retention.age: -1
-
- - do:
- indices.flush:
- index: test
- force: true # force flush as we don't have pending ops
-
- - do:
- indices.stats:
- metric: [ translog ]
- ## creation translog size has some overhead due to an initial empty generation that will be trimmed later
- - lte: { indices.test.primaries.translog.size_in_bytes: $creation_size }
- - match: { indices.test.primaries.translog.operations: 0 }
- - lte: { indices.test.primaries.translog.uncommitted_size_in_bytes: $creation_size }
- - match: { indices.test.primaries.translog.uncommitted_operations: 0 }
-
----
-"Translog retention with soft_deletes":
- - skip:
- version: " - 7.3.99"
- reason: "start ignoring translog retention policy with soft-deletes enabled in 7.4"
+"Translog retention":
- do:
indices.create:
index: test
@@ -164,9 +86,6 @@
---
"Translog last modified age stats":
- - skip:
- version: " - 6.2.99"
- reason: translog last modified age stats was added in 6.3.0
- do:
index:
index: test
@@ -179,70 +98,7 @@
- gte: { indices.test.primaries.translog.earliest_last_modified_age: 0 }
---
-"Translog stats on closed indices without soft-deletes":
- - skip:
- version: " - 7.5.99"
- reason: "indices without soft deletes are deprecated in 7.6"
- features: "allowed_warnings"
-
- - do:
- indices.create:
- index: test
- body:
- settings:
- soft_deletes.enabled: false
- routing.rebalance.enable: "none" # prevents shard relocations while we are closing an index
- allowed_warnings:
- - Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions.
- Please do not specify value for setting [index.soft_deletes.enabled] of index [test].
-
- - do:
- cluster.health:
- wait_for_no_initializing_shards: true
- wait_for_events: languid
- - do:
- index:
- index: test
- id: 1
- body: { "foo": "bar" }
-
- - do:
- index:
- index: test
- id: 2
- body: { "foo": "bar" }
-
- - do:
- index:
- index: test
- id: 3
- body: { "foo": "bar" }
-
- - do:
- indices.stats:
- metric: [ translog ]
- - match: { indices.test.primaries.translog.operations: 3 }
- - match: { indices.test.primaries.translog.uncommitted_operations: 3 }
-
- - do:
- indices.close:
- index: test
- wait_for_active_shards: 1
- - is_true: acknowledged
-
- - do:
- indices.stats:
- metric: [ translog ]
- expand_wildcards: all
- forbid_closed_indices: false
- - match: { indices.test.primaries.translog.operations: 3 }
- - match: { indices.test.primaries.translog.uncommitted_operations: 0 }
-
----
-"Translog stats on closed indices with soft-deletes":
- - skip:
- version: " - 7.3.99"
- reason: "start ignoring translog retention policy with soft-deletes enabled in 7.4"
+"Translog stats on closed indices":
- do:
indices.create:
index: test
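
The surviving "Translog retention" and "Translog stats on closed indices" tests keep the assertion style of the deleted soft-deletes-less variants: capture a baseline with `set`, then compare with `gt`/`match` after indexing. A sketch of that pattern, using the same `test` index and stanzas shown in the removed hunk:

  - do:
      indices.stats:
        metric: [ translog ]
  - set: { indices.test.primaries.translog.size_in_bytes: creation_size }

  - do:
      index:
        index: test
        id: "1"
        body: { "foo": "bar" }

  - do:
      indices.stats:
        metric: [ translog ]
  - gt:    { indices.test.primaries.translog.size_in_bytes: $creation_size }
  - match: { indices.test.primaries.translog.operations: 1 }
  - match: { indices.test.primaries.translog.uncommitted_operations: 1 }
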
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml
index 798d699ae80a0..854c460c535c0 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml
@@ -1,9 +1,5 @@
---
"Basic multi-get":
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
indices.create:
index: test_2
@@ -26,17 +22,14 @@
- is_false: docs.0.found
- match: { docs.0._index: test_2 }
- - match: { docs.0._type: null }
- match: { docs.0._id: "1" }
- is_false: docs.1.found
- match: { docs.1._index: test_1 }
- - match: { docs.1._type: _doc }
- match: { docs.1._id: "2" }
- is_true: docs.2.found
- match: { docs.2._index: test_1 }
- - match: { docs.2._type: _doc }
- match: { docs.2._id: "1" }
- match: { docs.2._version: 1 }
- match: { docs.2._source: { foo: bar }}
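
After the `_type` assertions are dropped, the typeless multi-get request and its checks reduce to roughly the following (a sketch using the same indices, ids, and surviving assertions as the test above):

  - do:
      mget:
        body:
          docs:
            - { _index: test_2, _id: 1 }
            - { _index: test_1, _id: 2 }
            - { _index: test_1, _id: 1 }

  - is_false: docs.0.found
  - match: { docs.0._index: test_2 }
  - match: { docs.0._id: "1" }

  - is_true: docs.2.found
  - match: { docs.2._index: test_1 }
  - match: { docs.2._id: "1" }
  - match: { docs.2._version: 1 }
  - match: { docs.2._source: { foo: bar }}
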
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/11_default_index_type.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/11_default_index_type.yml
deleted file mode 100644
index 773b7e3bcfe6b..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/11_default_index_type.yml
+++ /dev/null
@@ -1,44 +0,0 @@
----
-"Default index/type":
- - do:
- indices.create:
- index: test_2
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- mget:
- index: test_1
- type: test
- body:
- docs:
- - { _index: test_2, _id: 1}
- - { _type: none, _id: 1}
- - { _id: 2}
- - { _id: 1}
-
- - is_false: docs.0.found
- - match: { docs.0._index: test_2 }
- - match: { docs.0._type: test }
- - match: { docs.0._id: "1" }
-
- - is_false: docs.1.found
- - match: { docs.1._index: test_1 }
- - match: { docs.1._type: none }
- - match: { docs.1._id: "1" }
-
- - is_false: docs.2.found
- - match: { docs.2._index: test_1 }
- - match: { docs.2._type: test }
- - match: { docs.2._id: "2" }
-
- - is_true: docs.3.found
- - match: { docs.3._index: test_1 }
- - match: { docs.3._type: test }
- - match: { docs.3._id: "1" }
- - match: { docs.3._version: 1 }
- - match: { docs.3._source: { foo: bar }}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml
index a1101a903f896..38ca8da20dd5d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml
@@ -1,9 +1,5 @@
---
"Non-existent index":
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
index:
index: test_1
@@ -18,7 +14,6 @@
- is_false: docs.0.found
- match: { docs.0._index: test_2 }
- - match: { docs.0._type: null }
- match: { docs.0._id: "1" }
- do:
@@ -29,5 +24,4 @@
- is_true: docs.0.found
- match: { docs.0._index: test_1 }
- - match: { docs.0._type: _doc }
- match: { docs.0._id: "1" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml
index 2711bed58dbb1..eb46d45f027d3 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml
@@ -1,9 +1,5 @@
---
"Missing metadata":
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
index:
index: test_1
@@ -43,7 +39,6 @@
- is_true: docs.0.found
- match: { docs.0._index: test_1 }
- - match: { docs.0._type: _doc }
- match: { docs.0._id: "1" }
- match: { docs.0._version: 1 }
- match: { docs.0._source: { foo: bar }}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml
index 9c1d0242b05c9..825dc256d786a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml
@@ -8,11 +8,11 @@
bulk:
refresh: true
body: |
- {"index": {"_index": "test_1", "_type": "_doc", "_id": 1}}
+ {"index": {"_index": "test_1", "_id": 1}}
{ "foo": "bar" }
- {"index": {"_index": "test_2", "_type": "_doc", "_id": 2}}
+ {"index": {"_index": "test_2", "_id": 2}}
{ "foo": "bar" }
- {"index": {"_index": "test_3", "_type": "_doc", "_id": 3}}
+ {"index": {"_index": "test_3", "_id": 3}}
{ "foo": "bar" }
- do:
@@ -34,12 +34,10 @@
- is_true: docs.0.found
- match: { docs.0._index: test_1 }
- - match: { docs.0._type: _doc }
- match: { docs.0._id: "1" }
- is_false: docs.1.found
- match: { docs.1._index: test_two_and_three }
- - match: { docs.1._type: null }
- match: { docs.1._id: "2" }
- match: { docs.1.error.root_cause.0.type: "illegal_argument_exception" }
- match: { docs.1.error.root_cause.0.reason: "/[aA]lias.\\[test_two_and_three\\].has.more.than.one.index.associated.with.it.\\[test_[23]{1},.test_[23]{1}\\],.can't.execute.a.single.index.op/" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml
index fbdc9b265a95a..f71b5e86dab56 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml
@@ -1,9 +1,5 @@
---
"IDs":
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
indices.create:
index: test_1
@@ -28,14 +24,12 @@
- is_true: docs.0.found
- match: { docs.0._index: test_1 }
- - match: { docs.0._type: _doc }
- match: { docs.0._id: "1" }
- match: { docs.0._version: 1 }
- match: { docs.0._source: { foo: bar }}
- is_false: docs.1.found
- match: { docs.1._index: test_1 }
- - match: { docs.1._type: _doc }
- match: { docs.1._id: "3" }
- do:
@@ -46,14 +40,12 @@
- is_true: docs.0.found
- match: { docs.0._index: test_1 }
- - match: { docs.0._type: _doc }
- match: { docs.0._id: "1" }
- match: { docs.0._version: 1 }
- match: { docs.0._source: { foo: bar }}
- is_true: docs.1.found
- match: { docs.1._index: test_1 }
- - match: { docs.1._type: _doc }
- match: { docs.1._id: "2" }
- match: { docs.1._version: 1 }
- match: { docs.1._source: { foo: baz }}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/16_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/16_basic_with_types.yml
deleted file mode 100644
index 0850772ad426c..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/16_basic_with_types.yml
+++ /dev/null
@@ -1,45 +0,0 @@
----
-"Basic multi-get":
- - do:
- indices.create:
- index: test_2
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- indices.refresh: {}
-
- - do:
- mget:
- body:
- docs:
- - { _index: test_2, _type: test, _id: 1}
- - { _index: test_1, _type: none, _id: 1}
- - { _index: test_1, _type: test, _id: 2}
- - { _index: test_1, _type: test, _id: 1}
-
- - is_false: docs.0.found
- - match: { docs.0._index: test_2 }
- - match: { docs.0._type: test }
- - match: { docs.0._id: "1" }
-
- - is_false: docs.1.found
- - match: { docs.1._index: test_1 }
- - match: { docs.1._type: none }
- - match: { docs.1._id: "1" }
-
- - is_false: docs.2.found
- - match: { docs.2._index: test_1 }
- - match: { docs.2._type: test }
- - match: { docs.2._id: "2" }
-
- - is_true: docs.3.found
- - match: { docs.3._index: test_1 }
- - match: { docs.3._type: test }
- - match: { docs.3._id: "1" }
- - match: { docs.3._version: 1 }
- - match: { docs.3._source: { foo: bar }}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml
index d03f99be39517..15fd4320851e6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml
@@ -1,9 +1,5 @@
---
"Default index/type":
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
indices.create:
index: test_2
@@ -24,17 +20,14 @@
- is_false: docs.0.found
- match: { docs.0._index: test_2 }
- - match: { docs.0._type: null }
- match: { docs.0._id: "1" }
- is_false: docs.1.found
- match: { docs.1._index: test_1 }
- - match: { docs.1._type: _doc }
- match: { docs.1._id: "2" }
- is_true: docs.2.found
- match: { docs.2._index: test_1 }
- - match: { docs.2._type: _doc }
- match: { docs.2._id: "1" }
- match: { docs.2._version: 1 }
- match: { docs.2._source: { foo: bar }}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/18_non_existent_index_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/18_non_existent_index_with_types.yml
deleted file mode 100644
index 0623464225072..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/18_non_existent_index_with_types.yml
+++ /dev/null
@@ -1,30 +0,0 @@
----
-"Non-existent index":
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- mget:
- body:
- docs:
- - { _index: test_2, _type: test, _id: 1}
-
- - is_false: docs.0.found
- - match: { docs.0._index: test_2 }
- - match: { docs.0._type: test }
- - match: { docs.0._id: "1" }
-
- - do:
- mget:
- body:
- docs:
- - { _index: test_1, _type: test, _id: 1}
-
- - is_true: docs.0.found
- - match: { docs.0._index: test_1 }
- - match: { docs.0._type: test }
- - match: { docs.0._id: "1" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/19_missing_metadata_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/19_missing_metadata_with_types.yml
deleted file mode 100644
index d7af1797f7a40..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/19_missing_metadata_with_types.yml
+++ /dev/null
@@ -1,47 +0,0 @@
----
-"Missing metadata":
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- catch: /action_request_validation_exception.+ id is missing/
- mget:
- body:
- docs:
- - { _index: test_1, _type: test}
-
- - do:
- catch: /action_request_validation_exception.+ index is missing/
- mget:
- body:
- docs:
- - { _type: test, _id: 1}
-
- - do:
- catch: /action_request_validation_exception.+ no documents to get/
- mget:
- body:
- docs: []
-
- - do:
- catch: /action_request_validation_exception.+ no documents to get/
- mget:
- body: {}
-
- - do:
- mget:
- body:
- docs:
- - { _index: test_1, _id: 1}
-
- - is_true: docs.0.found
- - match: { docs.0._index: test_1 }
- - match: { docs.0._type: test }
- - match: { docs.0._id: "1" }
- - match: { docs.0._version: 1 }
- - match: { docs.0._source: { foo: bar }}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml
index 45460deb04e0b..1c965a9573ae0 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml
@@ -1,9 +1,5 @@
---
"Stored fields":
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
indices.create:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/21_alias_to_multiple_indices_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/21_alias_to_multiple_indices_with_types.yml
deleted file mode 100644
index 8b8ff6cbdb76c..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/21_alias_to_multiple_indices_with_types.yml
+++ /dev/null
@@ -1,45 +0,0 @@
----
-"Multi Get with alias that resolves to multiple indices":
- - skip:
- version: " - 7.8.99"
- reason: "message was changed to fix grammar in 7.9"
-
- - do:
- bulk:
- refresh: true
- body: |
- {"index": {"_index": "test_1", "_type": "test", "_id": 1}}
- { "foo": "bar" }
- {"index": {"_index": "test_2", "_type": "test", "_id": 2}}
- { "foo": "bar" }
- {"index": {"_index": "test_3", "_type": "test", "_id": 3}}
- { "foo": "bar" }
-
- - do:
- indices.put_alias:
- index: test_2
- name: test_two_and_three
-
- - do:
- indices.put_alias:
- index: test_3
- name: test_two_and_three
-
- - do:
- mget:
- body:
- docs:
- - { _index: test_1, _type: test, _id: 1}
- - { _index: test_two_and_three, _type: test, _id: 2}
-
- - is_true: docs.0.found
- - match: { docs.0._index: test_1 }
- - match: { docs.0._type: test }
- - match: { docs.0._id: "1" }
-
- - is_false: docs.1.found
- - match: { docs.1._index: test_two_and_three }
- - match: { docs.1._type: test }
- - match: { docs.1._id: "2" }
- - match: { docs.1.error.root_cause.0.type: "illegal_argument_exception" }
- - match: { docs.1.error.root_cause.0.reason: "/[aA]lias.\\[test_two_and_three\\].has.more.than.one.index.associated.with.it.\\[test_[23]{1},.test_[23]{1}\\],.can't.execute.a.single.index.op/" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/22_ids_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/22_ids_with_types.yml
deleted file mode 100644
index 6c233e4d92a9c..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/22_ids_with_types.yml
+++ /dev/null
@@ -1,72 +0,0 @@
----
-"IDs":
- - do:
- indices.create:
- index: test_1
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- index:
- index: test_1
- type: test
- id: 2
- body: { foo: baz }
-
- - do:
- mget:
- index: test_1
- type: test
- body:
- ids: [1, 3]
-
- - is_true: docs.0.found
- - match: { docs.0._index: test_1 }
- - match: { docs.0._type: test }
- - match: { docs.0._id: "1" }
- - match: { docs.0._version: 1 }
- - match: { docs.0._source: { foo: bar }}
-
- - is_false: docs.1.found
- - match: { docs.1._index: test_1 }
- - match: { docs.1._type: test }
- - match: { docs.1._id: "3" }
-
- - do:
- mget:
- index: test_1
- body:
- ids: [1, 2]
-
- - is_true: docs.0.found
- - match: { docs.0._index: test_1 }
- - match: { docs.0._type: test }
- - match: { docs.0._id: "1" }
- - match: { docs.0._version: 1 }
- - match: { docs.0._source: { foo: bar }}
-
- - is_true: docs.1.found
- - match: { docs.1._index: test_1 }
- - match: { docs.1._type: test }
- - match: { docs.1._id: "2" }
- - match: { docs.1._version: 1 }
- - match: { docs.1._source: { foo: baz }}
-
-
- - do:
- catch: /action_request_validation_exception.+ no documents to get/
- mget:
- index: test_1
- body:
- ids: []
-
- - do:
- catch: /action_request_validation_exception.+ no documents to get/
- mget:
- index: test_1
- body: {}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/23_stored_fields_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/23_stored_fields_with_types.yml
deleted file mode 100644
index 05b9738d46180..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/23_stored_fields_with_types.yml
+++ /dev/null
@@ -1,120 +0,0 @@
----
-"Stored fields":
-
- - do:
- indices.create:
- include_type_name: true
- index: test_1
- body:
- mappings:
- test:
- properties:
- foo:
- type: keyword
- store: true
- count:
- type: integer
- store: true
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- mget:
- index: test_1
- type: test
- body:
- docs:
- - { _id: 1 }
- - { _id: 1, stored_fields: foo }
- - { _id: 1, stored_fields: [foo] }
- - { _id: 1, stored_fields: [foo, _source] }
-
- - is_false: docs.0.fields
- - match: { docs.0._source: { foo: bar }}
-
- - match: { docs.1.fields.foo: [bar] }
- - is_false: docs.1._source
-
- - match: { docs.2.fields.foo: [bar] }
- - is_false: docs.2._source
-
- - match: { docs.3.fields.foo: [bar] }
- - match: { docs.3._source: { foo: bar }}
-
- - do:
- mget:
- index: test_1
- type: test
- stored_fields: foo
- body:
- docs:
- - { _id: 1 }
- - { _id: 1, stored_fields: foo }
- - { _id: 1, stored_fields: [foo] }
- - { _id: 1, stored_fields: [foo, _source] }
-
- - match: { docs.0.fields.foo: [bar] }
- - is_false: docs.0._source
-
- - match: { docs.1.fields.foo: [bar] }
- - is_false: docs.1._source
-
- - match: { docs.2.fields.foo: [bar] }
- - is_false: docs.2._source
-
- - match: { docs.3.fields.foo: [bar] }
- - match: { docs.3._source: { foo: bar }}
-
- - do:
- mget:
- index: test_1
- type: test
- stored_fields: [foo]
- body:
- docs:
- - { _id: 1 }
- - { _id: 1, stored_fields: foo }
- - { _id: 1, stored_fields: [foo] }
- - { _id: 1, stored_fields: [foo, _source] }
-
- - match: { docs.0.fields.foo: [bar] }
- - is_false: docs.0._source
-
- - match: { docs.1.fields.foo: [bar] }
- - is_false: docs.1._source
-
- - match: { docs.2.fields.foo: [bar] }
- - is_false: docs.2._source
-
- - match: { docs.3.fields.foo: [bar] }
- - match: { docs.3._source: { foo: bar }}
-
- - do:
- mget:
- index: test_1
- type: test
- stored_fields: [foo, _source]
- body:
- docs:
- - { _id: 1 }
- - { _id: 1, stored_fields: foo }
- - { _id: 1, stored_fields: [foo] }
- - { _id: 1, stored_fields: [foo, _source] }
-
- - match: { docs.0.fields.foo: [bar] }
- - match: { docs.0._source: { foo: bar }}
-
- - match: { docs.1.fields.foo: [bar] }
- - is_false: docs.1._source
-
- - match: { docs.2.fields.foo: [bar] }
- - is_false: docs.2._source
-
- - match: { docs.3.fields.foo: [bar] }
- - match: { docs.3._source: { foo: bar }}
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml
index df2924f274bdf..50bf9a158852b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml
@@ -40,6 +40,5 @@
- is_true: docs.2.found
- match: { docs.2._index: test_1 }
- - match: { docs.2._type: _doc }
- match: { docs.2._id: "1" }
- match: { docs.2._routing: "5" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/41_routing_with_types.yml
deleted file mode 100644
index d550dd26657c9..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/41_routing_with_types.yml
+++ /dev/null
@@ -1,44 +0,0 @@
----
-"Routing":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index:
- number_of_shards: 5
- number_of_routing_shards: 5
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- routing: 5
- body: { foo: bar }
-
- - do:
- mget:
- index: test_1
- type: test
- stored_fields: [_routing]
- body:
- docs:
- - { _id: 1 }
- - { _id: 1, routing: 4 }
- - { _id: 1, routing: 5 }
-
- - is_false: docs.0.found
- - is_false: docs.1.found
-
- - is_true: docs.2.found
- - match: { docs.2._index: test_1 }
- - match: { docs.2._type: test }
- - match: { docs.2._id: "1" }
- - match: { docs.2._routing: "5" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/61_realtime_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/61_realtime_refresh_with_types.yml
deleted file mode 100644
index 0cb7b71cf4368..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/61_realtime_refresh_with_types.yml
+++ /dev/null
@@ -1,53 +0,0 @@
----
-"Realtime Refresh":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index:
- refresh_interval: -1
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- mget:
- index: test_1
- type: test
- realtime: false
- body:
- ids: [1]
-
- - is_false: docs.0.found
-
- - do:
- mget:
- index: test_1
- type: test
- realtime: true
- body:
- ids: [1]
-
- - is_true: docs.0.found
-
- - do:
- mget:
- index: test_1
- type: test
- realtime: false
- refresh: true
- body:
- ids: [1]
-
- - is_true: docs.0.found
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml
index 3a3086cf3616d..35a85cf9c0116 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml
@@ -1,8 +1,4 @@
setup:
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
index:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/71_source_filtering_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/71_source_filtering_with_types.yml
deleted file mode 100644
index 4581e060b41a7..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/71_source_filtering_with_types.yml
+++ /dev/null
@@ -1,119 +0,0 @@
-setup:
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 }
- - do:
- index:
- index: test_1
- type: test
- id: 2
- body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 }
-
----
-"Source filtering - true/false":
-
- - do:
- mget:
- body:
- docs:
- - { _index: "test_1", _type: "test", _id: "1", _source: false }
- - { _index: "test_1", _type: "test", _id: "2", _source: true }
-
- - match: { docs.0._id: "1" }
- - is_false: docs.0._source
- - match: { docs.1._id: "2" }
- - is_true: docs.1._source
-
----
-"Source filtering - include field":
-
- - do:
- mget:
- body:
- docs:
- - { _index: "test_1", _type: "test", _id: "1", _source: include.field1 }
- - { _index: "test_1", _type: "test", _id: "2", _source: [ include.field1 ] }
-
- - match: { docs.0._source: { include: { field1: v1 }} }
- - match: { docs.1._source: { include: { field1: v1 }} }
-
-
----
-"Source filtering - include nested field":
-
- - do:
- mget:
- body:
- docs:
- - { _index: "test_1", _type: "test", _id: "1", _source: { include: include.field1 } }
- - { _index: "test_1", _type: "test", _id: "2", _source: { include: [ include.field1 ] } }
-
- - match: { docs.0._source: { include: { field1: v1 }} }
- - match: { docs.1._source: { include: { field1: v1 }} }
-
----
-"Source filtering - exclude field":
-
- - do:
- mget:
- body:
- docs:
- - { _index: "test_1", _type: "test", _id: "1", _source: { include: [ include ], exclude: [ "*.field2" ] } }
-
- - match: { docs.0._source: { include: { field1: v1 }} }
-
----
-"Source filtering - ids and true/false":
-
- - do:
- mget:
- _source: false
- index: test_1
- body: { ids: [ 1,2 ] }
- - is_false: docs.0._source
- - is_false: docs.1._source
-
- - do:
- mget:
- _source: true
- index: test_1
- body: { ids: [ 1,2 ] }
- - is_true: docs.0._source
- - is_true: docs.1._source
-
----
-"Source filtering - ids and include field":
-
- - do:
- mget:
- _source: include.field1
- index: test_1
- body: { ids: [ 1,2 ] }
- - match: { docs.0._source: { include: { field1: v1 }} }
- - match: { docs.1._source: { include: { field1: v1 }} }
-
----
-"Source filtering - ids and include nested field":
-
- - do:
- mget:
- _source_includes: "include.field1,count"
- index: test_1
- body: { ids: [ 1,2 ] }
- - match: { docs.0._source: { include: { field1: v1 }, count: 1} }
- - match: { docs.1._source: { include: { field1: v1 }, count: 1} }
-
----
-"Source filtering - ids and exclude field":
-
- - do:
- mget:
- _source_includes: include
- _source_excludes: "*.field2"
- index: test_1
- body: { ids: [ 1,2 ] }
- - match: { docs.0._source: { include: { field1: v1 } } }
- - match: { docs.1._source: { include: { field1: v1 } } }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml
index 0283455350a80..2a1b4501eae17 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml
@@ -1,11 +1,6 @@
---
"Deprecated parameters should fail in Multi Get query":
- - skip:
- version: " - 6.99.99"
- reason: _version, _routing are removed starting from 7.0, their equivalents without underscore are used instead
- features: "warnings"
-
- do:
index:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated_with_types.yml
deleted file mode 100644
index 5033f75c79426..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated_with_types.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-
----
-"Deprecated parameters should fail in Multi Get query":
-
- - skip:
- version: " - 6.99.99"
- reason: _version, _routing are removed starting from 7.0, their equivalents without underscore are used instead
- features: "warnings"
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- index:
- index: test_1
- type: test
- id: 2
- body: { foo: baz }
-
- - do:
- catch: bad_request
- mget:
- body:
- docs:
- - { _index: test_1, _type: test, _id: 1, _routing : test1 }
- - { _index: test_1, _type: test, _id: 2, _routing : test1 }
-
- - do:
- catch: bad_request
- mget:
- body:
- docs:
- - { _index: test_1, _type: test, _id: 1, _version : 1 }
- - { _index: test_1, _type: test, _id: 2, _version : 1 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/20_docs.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/20_docs.yml
index bb1b25a0dcb40..50eb344d99048 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/20_docs.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/20_docs.yml
@@ -41,12 +41,10 @@
like:
-
_index: test_1
- _type: _doc
doc:
foo: bar
-
_index: test_1
- _type: _doc
_id: 2
-
_id: 3
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/30_unlike.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/30_unlike.yml
index abea4c8fbe57a..a0f96eb6b2d1f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/30_unlike.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/30_unlike.yml
@@ -40,11 +40,9 @@
more_like_this:
like:
_index: test_1
- _type: _doc
_id: 1
unlike:
_index: test_1
- _type: _doc
_id: 3
include: true
min_doc_freq: 0
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml
index 5b092c9d15e44..3a06946bffd70 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml
@@ -94,10 +94,6 @@ setup:
---
"Search with new response format":
- - skip:
- version: " - 6.99.99"
- reason: hits.total is returned as an object in 7.0.0
-
- do:
msearch:
body:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/12_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/12_basic_with_types.yml
deleted file mode 100644
index 64e88de404ab7..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/12_basic_with_types.yml
+++ /dev/null
@@ -1,97 +0,0 @@
----
-setup:
-
- - do:
- index:
- index: index_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- index:
- index: index_1
- type: test
- id: 2
- body: { foo: baz }
-
- - do:
- index:
- index: index_1
- type: test
- id: 3
- body: { foo: foo }
-
- - do:
- index:
- index: index_2
- type: test
- id: 1
- body: { foo: foo }
-
- - do:
- indices.refresh: {}
-
----
-"Basic multi-search":
-
- - do:
- msearch:
- rest_total_hits_as_int: true
- body:
- - index: index_*
- - query:
- match: {foo: foo}
- - index: index_2
- - query:
- match_all: {}
- - index: index_1
- - query:
- match: {foo: foo}
- - index: index_3
- - query:
- match_all: {}
- - type: test
- - query:
- match_all: {}
-
- - match: { responses.0.hits.total: 2 }
- - match: { responses.1.hits.total: 1 }
- - match: { responses.2.hits.total: 1 }
- - match: { responses.3.error.root_cause.0.type: index_not_found_exception }
- - match: { responses.3.error.root_cause.0.reason: "/no.such.index/" }
- - match: { responses.3.error.root_cause.0.index: index_3 }
- - match: { responses.4.hits.total: 4 }
-
----
-"Least impact smoke test":
-# only passing these parameters to make sure they are consumed
- - do:
- msearch:
- rest_total_hits_as_int: true
- max_concurrent_shard_requests: 1
- max_concurrent_searches: 1
- body:
- - index: index_*
- - query:
- match: {foo: foo}
- - index: index_2
- - query:
- match_all: {}
- - index: index_1
- - query:
- match: {foo: foo}
- - index: index_3
- - query:
- match_all: {}
- - type: test
- - query:
- match_all: {}
-
- - match: { responses.0.hits.total: 2 }
- - match: { responses.1.hits.total: 1 }
- - match: { responses.2.hits.total: 1 }
- - match: { responses.3.error.root_cause.0.type: index_not_found_exception }
- - match: { responses.3.error.root_cause.0.reason: "/no.such.index/" }
- - match: { responses.3.error.root_cause.0.index: index_3 }
- - match: { responses.4.hits.total: 4 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml
index 87c3e6065bba4..56e5b2eb88e0e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml
@@ -1,7 +1,4 @@
setup:
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
indices.create:
index: testidx
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/11_basic_with_types.yml
deleted file mode 100644
index 0c037eee9ddd2..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/11_basic_with_types.yml
+++ /dev/null
@@ -1,86 +0,0 @@
-setup:
- - do:
- indices.create:
- include_type_name: true
- index: testidx
- body:
- mappings:
- testtype:
- properties:
- text:
- type : "text"
- term_vector : "with_positions_offsets"
- - do:
- index:
- index: testidx
- type: testtype
- id: testing_document
- body: {"text" : "The quick brown fox is brown."}
-
- - do:
- indices.refresh: {}
-
----
-"Basic tests for multi termvector get":
-
- - do:
- mtermvectors:
- "term_statistics" : true
- "body" :
- "docs":
- -
- "_index" : "testidx"
- "_type" : "testtype"
- "_id" : "testing_document"
-
- - match: {docs.0.term_vectors.text.terms.brown.term_freq: 2}
- - match: {docs.0.term_vectors.text.terms.brown.ttf: 2}
-
- - do:
- mtermvectors:
- "term_statistics" : true
- "body" :
- "docs":
- -
- "_index" : "testidx"
- "_type" : "testtype"
- "_id" : "testing_document"
-
- - match: {docs.0.term_vectors.text.terms.brown.term_freq: 2}
- - match: {docs.0.term_vectors.text.terms.brown.ttf: 2}
-
- - do:
- mtermvectors:
- "term_statistics" : true
- "index" : "testidx"
- "body" :
- "docs":
- -
- "_type" : "testtype"
- "_id" : "testing_document"
-
- - match: {docs.0.term_vectors.text.terms.brown.term_freq: 2}
- - match: {docs.0.term_vectors.text.terms.brown.ttf: 2}
-
- - do:
- mtermvectors:
- "term_statistics" : true
- "index" : "testidx"
- "type" : "testtype"
- "body" :
- "docs":
- -
- "_id" : "testing_document"
-
- - match: {docs.0.term_vectors.text.terms.brown.term_freq: 2}
- - match: {docs.0.term_vectors.text.terms.brown.ttf: 2}
-
- - do:
- mtermvectors:
- "term_statistics" : true
- "index" : "testidx"
- "type" : "testtype"
- "ids" : ["testing_document"]
-
- - match: {docs.0.term_vectors.text.terms.brown.term_freq: 2}
- - match: {docs.0.term_vectors.text.terms.brown.ttf: 2}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml
index 376192680c99b..215c62c2a40ed 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml
@@ -1,16 +1,5 @@
-setup:
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
---
"Deprecated camel case and _ parameters should fail in Term Vectors query":
-
- - skip:
- version: " - 6.99.99"
- reason: camel case and _ parameters (e.g. versionType, _version_type) should fail from 7.0
- features: "warnings"
-
- do:
indices.create:
index: testidx
@@ -47,7 +36,6 @@ setup:
"docs":
-
"_index" : "testidx"
- "_type" : "_doc"
"_id" : "testing_document"
"version" : 1
"_version_type" : "external"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/21_deprecated_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/21_deprecated_with_types.yml
deleted file mode 100644
index b0335498e22a1..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/21_deprecated_with_types.yml
+++ /dev/null
@@ -1,53 +0,0 @@
-
----
-"Deprecated camel case and _ parameters should fail in Term Vectors query":
-
- - skip:
- version: " - 6.99.99"
- reason: camel case and _ parameters (e.g. versionType, _version_type) should fail from 7.0
- features: "warnings"
-
- - do:
- indices.create:
- include_type_name: true
- index: testidx
- body:
- mappings:
- testtype:
- properties:
- text:
- type : "text"
- term_vector : "with_positions_offsets"
-
- - do:
- index:
- index: testidx
- type: testtype
- id: testing_document
- body: {"text" : "The quick brown fox is brown."}
-
- - do:
- catch: bad_request
- mtermvectors:
- "term_statistics" : true
- "body" :
- "docs":
- -
- "_index" : "testidx"
- "_type" : "testtype"
- "_id" : "testing_document"
- "version" : 1
- "versionType" : "external"
-
- - do:
- catch: bad_request
- mtermvectors:
- "term_statistics" : true
- "body" :
- "docs":
- -
- "_index" : "testidx"
- "_type" : "testtype"
- "_id" : "testing_document"
- "version" : 1
- "_version_type" : "external"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml
index aa6d1e9841dd7..9294c696d91e6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml
@@ -198,9 +198,6 @@
---
"Scroll cannot used the request cache":
- - skip:
- version: " - 6.99.99"
- reason: the error message has been added in v7.0.0
- do:
indices.create:
index: test_scroll
@@ -217,9 +214,6 @@
---
"Scroll with size 0":
- - skip:
- version: " - 6.1.99"
- reason: the error message has been added in v6.2.0
- do:
indices.create:
index: test_scroll
@@ -237,10 +231,6 @@
---
"Scroll max_score is null":
- - skip:
- version: " - 6.99.99"
- reason: max_score was set to 0 rather than null before 7.0
-
- do:
indices.create:
index: test_scroll
@@ -285,9 +275,6 @@
---
"Scroll with new response format":
- - skip:
- version: " - 6.9.99"
- reason: hits.total is returned as an object in 7.0.0
- do:
indices.create:
index: test_scroll
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml
index f655b43b98949..228b1a73ecd2b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml
@@ -103,10 +103,6 @@ setup:
---
"Sliced scroll with invalid arguments":
- - skip:
- version: " - 6.99.99"
- reason: Prior versions return 500 rather than 404
-
- do:
catch: bad_request
search:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml
index a7e75f80e3f6e..0f35f95be0bf6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml
@@ -10,10 +10,6 @@
---
"Max keep alive":
- - skip:
- version: " - 6.99.99"
- reason: search.max_keep_alive was added in 7.0.0
-
- do:
index:
index: test_scroll
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml
index ddb555b8cd5a5..d13636360d2f9 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml
@@ -49,10 +49,6 @@ setup:
---
"top_hits aggregation with nested documents":
- - skip:
- version: " - 6.1.99"
- reason: "<= 6.1 nodes don't always include index or id in nested top hits"
-
- do:
search:
rest_total_hits_as_int: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml
index 3683ad108e8c2..869214f9111c6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml
@@ -702,11 +702,6 @@ setup:
---
"Global ordinals are not loaded with the map execution hint":
-
- - skip:
- version: " - 6.99.99"
- reason: bug fixed in 7.0
-
- do:
index:
refresh: true
@@ -752,11 +747,6 @@ setup:
---
"Global ordinals are loaded with the global_ordinals execution hint":
-
- - skip:
- version: " - 6.99.99"
- reason: bug fixed in 7.0
-
- do:
index:
refresh: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml
index 1ec3a302d6884..2e298441918bc 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml
@@ -84,10 +84,6 @@ setup:
---
"Simple Composite aggregation":
- - skip:
- version: " - 6.0.99"
- reason: this uses a new API that has been added in 6.1
-
- do:
search:
rest_total_hits_as_int: true
@@ -113,11 +109,6 @@ setup:
---
"Nested Composite aggregation":
- - skip:
- version: " - 6.0.99"
- reason: this uses a new API that has been added in 6.1
-
-
- do:
search:
rest_total_hits_as_int: true
@@ -163,11 +154,6 @@ setup:
---
"Aggregate After":
- - skip:
- version: " - 6.0.99"
- reason: this uses a new API that has been added in 6.1
-
-
- do:
search:
rest_total_hits_as_int: true
@@ -205,11 +191,6 @@ setup:
---
"Aggregate After Missing":
- - skip:
- version: " - 6.1.99"
- reason: bug fixed in 6.2.0
-
-
- do:
search:
rest_total_hits_as_int: true
@@ -236,10 +217,6 @@ setup:
---
"Invalid Composite aggregation":
- - skip:
- version: " - 6.0.99"
- reason: this uses a new API that has been added in 6.1
-
- do:
catch: /\[composite\] aggregation cannot be used with a parent aggregation/
search:
@@ -426,10 +403,6 @@ setup:
---
"Composite aggregation with after_key in the response":
- - skip:
- version: " - 6.2.99"
- reason: starting in 6.3.0 after_key is returned in the response
-
- do:
search:
rest_total_hits_as_int: true
@@ -455,10 +428,6 @@ setup:
---
"Composite aggregation and array size":
- - skip:
- version: " - 6.99.99"
- reason: starting in 7.0 the composite aggregation throws an execption if the provided size is greater than search.max_buckets.
-
- do:
catch: /.*Trying to create too many buckets.*/
search:
@@ -481,10 +450,6 @@ setup:
---
"Composite aggregation with nested parent":
- - skip:
- version: " - 6.99.99"
- reason: the ability to set a nested parent aggregation was added in 7.0.
-
- do:
search:
rest_total_hits_as_int: true
@@ -1030,3 +995,35 @@ setup:
- length: { aggregations.test.buckets: 1 }
- match: { aggregations.test.buckets.0.key.keyword: "foo" }
- match: { aggregations.test.buckets.0.doc_count: 1 }
+---
+"Simple Composite aggregation with missing order":
+ - skip:
+ version: " - 1.2.99"
+ reason: missing_order is supported in 1.3.0.
+
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ aggregations:
+ test:
+ composite:
+ sources: [
+ "kw": {
+ "terms": {
+ "field": "keyword",
+ "missing_bucket": true,
+ "missing_order": "last"
+ }
+ }
+ ]
+
+ - match: {hits.total: 6}
+ - length: { aggregations.test.buckets: 3 }
+ - match: { aggregations.test.buckets.0.key.kw: "bar" }
+ - match: { aggregations.test.buckets.0.doc_count: 3 }
+ - match: { aggregations.test.buckets.1.key.kw: "foo" }
+ - match: { aggregations.test.buckets.1.doc_count: 2 }
+ - match: { aggregations.test.buckets.2.key.kw: null }
+ - match: { aggregations.test.buckets.2.doc_count: 2 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml
index 75349e9839746..339fe72b77730 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml
@@ -1,8 +1,3 @@
-setup:
- - skip:
- version: " - 6.3.99"
- reason: "moving_fn added in 6.4.0"
-
---
"Bad window":
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml
index c5988fc9e5dc4..6b17132c751de 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml
@@ -1,7 +1,4 @@
setup:
- - skip:
- version: " - 6.3.99"
- reason: weighted_avg is only available as of 6.4.0
- do:
indices.create:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml
index 0cba08fccae9b..03797503436fb 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml
@@ -1,7 +1,4 @@
setup:
- - skip:
- version: " - 6.5.99"
- reason: "added in 6.6.0"
- do:
indices.create:
index: test
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml
index bc4105af85e65..2b02c0447e6c8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml
@@ -31,10 +31,6 @@ setup:
---
"Filter aggs with terms lookup and ensure it's cached":
# Because the filter agg rewrites the terms lookup in the rewrite phase the request can be cached
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
search:
rest_total_hits_as_int: true
@@ -78,10 +74,6 @@ setup:
---
"As a child of terms":
- - skip:
- version: " - 6.99.99"
- reason: the test is written for hits.total.value
-
- do:
bulk:
refresh: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/51_filter_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/51_filter_with_types.yml
deleted file mode 100644
index 54476ce6e65b1..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/51_filter_with_types.yml
+++ /dev/null
@@ -1,60 +0,0 @@
-setup:
- - do:
- indices.create:
- include_type_name: true
- index: test
- body:
- settings:
- number_of_shards: 1
- number_of_replicas: 0
- mappings:
- test:
- properties:
- mentions:
- type: keyword
- notifications:
- type: keyword
-
- - do:
- index:
- index: test
- type: test
- id: foo|bar|baz0
- body: { "notifications" : ["abc"] }
-
- - do:
- index:
- index: test
- type: test
- id: foo|bar|baz1
- body: { "mentions" : ["abc"] }
-
- - do:
- indices.refresh: {}
-
----
-"Filter aggs with terms lookup and ensure it's cached":
- # Because the filter agg rewrites the terms lookup in the rewrite phase the request can be cached
- - skip:
- features: allowed_warnings
- - do:
- allowed_warnings:
- - "Deprecated field [type] used, this field is unused and will be removed entirely"
- search:
- rest_total_hits_as_int: true
- size: 0
- request_cache: true
- body: {"aggs": { "itemsNotify": { "filter": { "terms": { "mentions": { "index": "test", "type": "test", "id": "foo|bar|baz0", "path": "notifications"}}}, "aggs": { "mentions" : {"terms" : { "field" : "mentions" }}}}}}
-
- # validate result
- - match: { hits.total: 2 }
- - match: { aggregations.itemsNotify.doc_count: 1 }
- - length: { aggregations.itemsNotify.mentions.buckets: 1 }
- - match: { aggregations.itemsNotify.mentions.buckets.0.key: "abc" }
- # we are using a lookup - this should not cache
- - do:
- indices.stats: { index: test, metric: request_cache}
- - match: { _shards.total: 1 }
- - match: { _all.total.request_cache.hit_count: 0 }
- - match: { _all.total.request_cache.miss_count: 1 }
- - is_true: indices.test
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml
index 334708b54b066..462f4f5d25e0b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml
@@ -28,9 +28,6 @@ setup:
---
"Unified highlighter on a field WITHOUT OFFSETS exceeding index.highlight.max_analyzed_offset should FAIL":
- - skip:
- version: " - 6.99.99"
- reason: index.highlight.max_analyzed_offset setting has been added in 7.0.0
- do:
catch: bad_request
search:
@@ -42,9 +39,6 @@ setup:
---
"Plain highlighter on a field WITHOUT OFFSETS exceeding index.highlight.max_analyzed_offset should FAIL":
- - skip:
- version: " - 6.99.99"
- reason: index.highlight.max_analyzed_offset setting has been added in 7.0.0
- do:
catch: bad_request
search:
@@ -56,9 +50,6 @@ setup:
---
"Unified highlighter on a field WITH OFFSETS exceeding index.highlight.max_analyzed_offset should SUCCEED":
- - skip:
- version: " - 6.99.99"
- reason: index.highligt.max_analyzed_offset setting has been added in 7.0.0
- do:
search:
rest_total_hits_as_int: true
@@ -69,9 +60,6 @@ setup:
---
"Plain highlighter on a field WITH OFFSETS exceeding index.highlight.max_analyzed_offset should FAIL":
- - skip:
- version: " - 6.99.99"
- reason: index.highlight.max_analyzed_offset setting has been added in 7.0.0
- do:
catch: bad_request
search:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
index c3ffd930e9e1d..7b3b4e8233d0b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
@@ -30,10 +30,8 @@ setup:
body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : {} } } }
- match: { hits.total: 1 }
- match: { hits.hits.0._index: "test" }
- - match: { hits.hits.0._type: "_doc" }
- match: { hits.hits.0._id: "1" }
- match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._index: "test" }
- - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._type: "_doc" }
- match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._id: "1" }
- match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.field: "nested_field" }
- match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.offset: 0 }
@@ -63,7 +61,6 @@ setup:
- match: { hits.total: 1 }
- match: { hits.hits.0._index: "test" }
- - match: { hits.hits.0._type: "_doc" }
- match: { hits.hits.0._id: "1" }
- match: { hits.hits.0._version: 1 }
- match: { hits.hits.0.fields._seq_no: [0] }
@@ -86,7 +83,6 @@ setup:
- match: { hits.total: 1 }
- match: { hits.hits.0._index: "test" }
- - match: { hits.hits.0._type: "_doc" }
- match: { hits.hits.0._id: "1" }
- match: { hits.hits.0._version: 2 }
- match: { hits.hits.0.fields._seq_no: [1] }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml
index a82d7fff480eb..d2933a44e586d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml
@@ -19,7 +19,6 @@ setup:
index: test
- is_true: hits.hits.0._id
- - is_true: hits.hits.0._type
- is_true: hits.hits.0._source
- do:
@@ -30,7 +29,6 @@ setup:
stored_fields: []
- is_true: hits.hits.0._id
- - is_true: hits.hits.0._type
- is_false: hits.hits.0._source
- do:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml
index e89d340347a12..091638d6a07fb 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml
@@ -141,9 +141,6 @@ setup:
---
"docvalue_fields":
- - skip:
- version: " - 6.9.99"
- reason: Triggers a deprecation warning before 7.0
- do:
search:
body:
@@ -152,9 +149,6 @@ setup:
---
"multiple docvalue_fields":
- - skip:
- version: " - 6.9.99"
- reason: Triggered a deprecation warning before 7.0
- do:
search:
body:
@@ -163,9 +157,6 @@ setup:
---
"docvalue_fields as url param":
- - skip:
- version: " - 6.99.99"
- reason: Triggered a deprecation warning before 7.0
- do:
search:
docvalue_fields: [ "count" ]
@@ -174,8 +165,6 @@ setup:
---
"docvalue_fields with default format":
- skip:
- version: " - 6.99.99"
- reason: Only triggers warnings on 7.0+
features: allowed_warnings
- do:
allowed_warnings:
@@ -189,9 +178,6 @@ setup:
---
"docvalue_fields with explicit format":
- - skip:
- version: " - 6.3.99"
- reason: format option was added in 6.4
- do:
search:
body:
@@ -202,9 +188,6 @@ setup:
---
"docvalue_fields - double":
- - skip:
- version: " - 6.99.99"
- reason: Triggered a deprecation warning before 7.0
- do:
search:
body:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
index ec279b8d0d5ec..149bc90f31ea0 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
@@ -73,19 +73,16 @@ setup:
- match: {hits.total: 6 }
- length: {hits.hits: 3 }
- match: {hits.hits.0._index: test }
- - match: {hits.hits.0._type: _doc }
- match: {hits.hits.0.fields.numeric_group: [3] }
- match: {hits.hits.0.sort: [36] }
- match: {hits.hits.0._id: "6" }
- is_false: hits.hits.0.inner_hits
- match: {hits.hits.1._index: test }
- - match: {hits.hits.1._type: _doc }
- match: {hits.hits.1.fields.numeric_group: [1] }
- match: {hits.hits.1.sort: [24] }
- match: {hits.hits.1._id: "3" }
- is_false: hits.hits.1.inner_hits
- match: {hits.hits.2._index: test }
- - match: {hits.hits.2._type: _doc }
- match: {hits.hits.2.fields.numeric_group: [25] }
- match: {hits.hits.2.sort: [10] }
- match: {hits.hits.2._id: "4" }
@@ -111,7 +108,6 @@ setup:
- match: {hits.total: 6 }
- length: {hits.hits: 1 }
- match: {hits.hits.0._index: test }
- - match: {hits.hits.0._type: _doc }
- match: {hits.hits.0.fields.numeric_group: [25]}
- match: {hits.hits.0.sort: [10] }
- match: {hits.hits.0._id: "4" }
@@ -140,7 +136,6 @@ setup:
- match: { hits.total: 6 }
- length: { hits.hits: 3 }
- match: { hits.hits.0._index: test }
- - match: { hits.hits.0._type: _doc }
- match: { hits.hits.0.fields.numeric_group: [3] }
- match: { hits.hits.0.sort: [36] }
- match: { hits.hits.0._id: "6" }
@@ -148,7 +143,6 @@ setup:
- length: { hits.hits.0.inner_hits.sub_hits.hits.hits: 1 }
- match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" }
- match: { hits.hits.1._index: test }
- - match: { hits.hits.1._type: _doc }
- match: { hits.hits.1.fields.numeric_group: [1] }
- match: { hits.hits.1.sort: [24] }
- match: { hits.hits.1._id: "3" }
@@ -157,7 +151,6 @@ setup:
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "2" }
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" }
- match: { hits.hits.2._index: test }
- - match: { hits.hits.2._type: _doc }
- match: { hits.hits.2.fields.numeric_group: [25] }
- match: { hits.hits.2.sort: [10] }
- match: { hits.hits.2._id: "4" }
@@ -219,7 +212,6 @@ setup:
- match: { hits.total: 6 }
- length: { hits.hits: 3 }
- match: { hits.hits.0._index: test }
- - match: { hits.hits.0._type: _doc }
- match: { hits.hits.0.fields.numeric_group: [3] }
- match: { hits.hits.0.sort: [36] }
- match: { hits.hits.0._id: "6" }
@@ -227,7 +219,6 @@ setup:
- length: { hits.hits.0.inner_hits.sub_hits.hits.hits: 1 }
- match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" }
- match: { hits.hits.1._index: test }
- - match: { hits.hits.1._type: _doc }
- match: { hits.hits.1.fields.numeric_group: [1] }
- match: { hits.hits.1.sort: [24] }
- match: { hits.hits.1._id: "3" }
@@ -236,7 +227,6 @@ setup:
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "2" }
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" }
- match: { hits.hits.2._index: test }
- - match: { hits.hits.2._type: _doc }
- match: { hits.hits.2.fields.numeric_group: [25] }
- match: { hits.hits.2.sort: [10] }
- match: { hits.hits.2._id: "4" }
@@ -315,11 +305,6 @@ setup:
---
"no hits and inner_hits max_score null":
-
- - skip:
- version: " - 6.99.99"
- reason: max_score was set to 0 rather than null before 7.0
-
- do:
search:
rest_total_hits_as_int: true
@@ -353,7 +338,6 @@ setup:
- match: { hits.total: 6 }
- length: { hits.hits: 3 }
- match: { hits.hits.0._index: test }
- - match: { hits.hits.0._type: _doc }
- match: { hits.hits.0.fields.numeric_group: [3] }
- match: { hits.hits.0.sort: [36] }
- match: { hits.hits.0._id: "6" }
@@ -364,7 +348,6 @@ setup:
- length: { hits.hits.0.inner_hits.sub_hits_desc.hits.hits: 1 }
- match: { hits.hits.0.inner_hits.sub_hits_desc.hits.hits.0._id: "6" }
- match: { hits.hits.1._index: test }
- - match: { hits.hits.1._type: _doc }
- match: { hits.hits.1.fields.numeric_group: [1] }
- match: { hits.hits.1.sort: [24] }
- match: { hits.hits.1._id: "3" }
@@ -376,7 +359,6 @@ setup:
- length: { hits.hits.1.inner_hits.sub_hits_desc.hits.hits: 1 }
- match: { hits.hits.1.inner_hits.sub_hits_desc.hits.hits.0._id: "3" }
- match: { hits.hits.2._index: test }
- - match: { hits.hits.2._type: _doc }
- match: { hits.hits.2.fields.numeric_group: [25] }
- match: { hits.hits.2.sort: [10] }
- match: { hits.hits.2._id: "4" }
@@ -390,11 +372,6 @@ setup:
---
"field collapsing, inner_hits and version":
-
- - skip:
- version: " - 6.1.0"
- reason: "bug fixed in 6.1.1"
-
- do:
count:
index: test
@@ -412,7 +389,6 @@ setup:
- match: { hits.total: 6 }
- length: { hits.hits: 3 }
- match: { hits.hits.0._index: test }
- - match: { hits.hits.0._type: _doc }
- match: { hits.hits.0.fields.numeric_group: [3] }
- match: { hits.hits.0.sort: [36] }
- match: { hits.hits.0._id: "6" }
@@ -422,7 +398,6 @@ setup:
- match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" }
- match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._version: 66 }
- match: { hits.hits.1._index: test }
- - match: { hits.hits.1._type: _doc }
- match: { hits.hits.1.fields.numeric_group: [1] }
- match: { hits.hits.1.sort: [24] }
- match: { hits.hits.1._id: "3" }
@@ -434,7 +409,6 @@ setup:
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" }
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._version: 11 }
- match: { hits.hits.2._index: test }
- - match: { hits.hits.2._type: _doc }
- match: { hits.hits.2.fields.numeric_group: [25] }
- match: { hits.hits.2.sort: [10] }
- match: { hits.hits.2._id: "4" }
@@ -493,11 +467,6 @@ setup:
---
"field collapsing, inner_hits and seq_no":
-
- - skip:
- version: " - 6.99.0"
- reason: "sequence numbers introduced in 7.0.0"
-
- do:
search:
rest_total_hits_as_int: true
@@ -532,7 +501,6 @@ setup:
- gte: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._seq_no: 0 }
- gte: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._primary_term: 1 }
- match: { hits.hits.2._index: test }
- - match: { hits.hits.2._type: _doc }
- match: { hits.hits.2.fields.numeric_group: [25] }
- match: { hits.hits.2.sort: [10] }
- match: { hits.hits.2._id: "4" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml
index 33b149b00a4fb..201e456be2cdd 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml
@@ -550,9 +550,6 @@ setup:
---
"Test exists query on _index field":
- - skip:
- version: " - 6.0.99"
- reason: exists on _index not supported prior to 6.1.0
- do:
search:
rest_total_hits_as_int: true
@@ -566,9 +563,6 @@ setup:
---
"Test exists query on _type field":
- - skip:
- version: " - 6.0.99"
- reason: exists on _type not supported prior to 6.1.0
- do:
search:
rest_total_hits_as_int: true
@@ -608,9 +602,6 @@ setup:
---
"Test exists query on _source field":
- - skip:
- version: " - 6.0.99"
- reason: exists on _source not supported prior to 6.1.0
- do:
catch: /query_shard_exception/
search:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml
index 89ea24618c68f..82ccb816f2314 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml
@@ -1,8 +1,5 @@
---
"Terms Query with No.of terms exceeding index.max_terms_count should FAIL":
- - skip:
- version: " - 6.99.99"
- reason: index.max_terms_count setting has been added in 7.0.0
- do:
indices.create:
index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml
deleted file mode 100644
index d3d48eae4082d..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml
+++ /dev/null
@@ -1,63 +0,0 @@
----
-"Terms Query with No.of terms exceeding index.max_terms_count should FAIL":
- - skip:
- version: " - 6.99.99"
- reason: index.max_terms_count setting has been added in 7.0.0
- features: allowed_warnings
- - do:
- indices.create:
- include_type_name: true
- index: test_index
- body:
- settings:
- number_of_shards: 1
- index.max_terms_count: 2
- mappings:
- test_type:
- properties:
- user:
- type: keyword
- followers:
- type: keyword
- - do:
- bulk:
- refresh: true
- body:
- - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u1"}}'
- - '{"user": "u1", "followers": ["u2", "u3"]}'
- - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u2"}}'
- - '{"user": "u2", "followers": ["u1", "u3", "u4"]}'
- - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u3"}}'
- - '{"user": "u3", "followers": ["u1"]}'
- - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u4"}}'
- - '{"user": "u4", "followers": ["u3"]}'
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_index
- body: {"query" : {"terms" : {"user" : ["u1", "u2"]}}}
- - match: { hits.total: 2 }
-
- - do:
- catch: bad_request
- search:
- rest_total_hits_as_int: true
- index: test_index
- body: {"query" : {"terms" : {"user" : ["u1", "u2", "u3"]}}}
-
- - do:
- allowed_warnings:
- - "Deprecated field [type] used, this field is unused and will be removed entirely"
- search:
- rest_total_hits_as_int: true
- index: test_index
- body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u1", "path" : "followers"}}}}
- - match: { hits.total: 2 }
-
- - do:
- catch: bad_request
- search:
- rest_total_hits_as_int: true
- index: test_index
- body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u2", "path" : "followers"}}}}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml
index 40c80b88cfb1b..6f276f669f815 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml
@@ -1,8 +1,4 @@
setup:
- - skip:
- version: " - 6.2.99"
- reason: index_prefixes is only available as of 6.3.0
-
- do:
indices.create:
index: test
@@ -27,9 +23,6 @@ setup:
---
"search with index prefixes":
- - skip:
- version: " - 6.2.99"
- reason: index_prefixes is only available as of 6.3.0
- do:
search:
rest_total_hits_as_int: true
@@ -85,10 +78,6 @@ setup:
---
"search index prefixes with span_multi":
- - skip:
- version: " - 6.99.99"
- reason: span_multi throws an exception with prefix fields on < versions
-
- do:
search:
rest_total_hits_as_int: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_ignore_malformed.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_ignore_malformed.yml
index 71ddb32302396..8596821a76c41 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_ignore_malformed.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_ignore_malformed.yml
@@ -1,9 +1,5 @@
---
setup:
- - skip:
- version: " - 6.3.99"
- reason: _ignored was added in 6.4.0
-
- do:
indices.create:
index: test
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_index_phrase_search.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_index_phrase_search.yml
index b48857be4e7a1..6340b20a4765f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_index_phrase_search.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_index_phrase_search.yml
@@ -1,8 +1,5 @@
---
"search with indexed phrases":
- - skip:
- version: " - 6.99.99"
- reason: index_phrase is only available as of 7.0.0
- do:
indices.create:
index: test
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml
index fd4621e48cad3..14d68fa3c429a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml
@@ -46,7 +46,6 @@ setup:
- match: {hits.total: 1}
- match: {hits.hits.0._index: test_1 }
- - match: {hits.hits.0._type: _doc }
- match: {hits.hits.0._id: "1" }
- do:
@@ -60,7 +59,6 @@ setup:
- match: {hits.total: 1}
- match: {hits.hits.0._index: test_2 }
- - match: {hits.hits.0._type: _doc }
- match: {hits.hits.0._id: "42" }
---
@@ -76,9 +74,6 @@ setup:
---
"Search with new response format":
- - skip:
- version: " - 6.99.99"
- reason: hits.total is returned as an object in 7.0.0
- do:
search:
body:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml
index 92bb049980dff..4d63a81a99595 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml
@@ -1,8 +1,5 @@
---
"Score should match explanation in rescore":
- - skip:
- version: " - 6.99.99"
- reason: Explanation for rescoring was corrected after these versions
- do:
bulk:
refresh: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml
index 78380d0da6a71..0286d3caf66b8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml
@@ -1,8 +1,4 @@
setup:
- - skip:
- version: " - 6.99.99"
- reason: "Implemented in 7.0"
-
- do:
indices.create:
index: test
@@ -17,16 +13,73 @@ setup:
refresh: true
body:
- '{"index": {"_index": "test", "_id": "1"}}'
- - '{"text" : "Some like it hot, some like it cold"}'
+ - '{"text" : "Some like hot and dry, some like it cold and wet"}'
- '{"index": {"_index": "test", "_id": "2"}}'
- '{"text" : "Its cold outside, theres no kind of atmosphere"}'
- '{"index": {"_index": "test", "_id": "3"}}'
- '{"text" : "Baby its cold there outside"}'
- '{"index": {"_index": "test", "_id": "4"}}'
- '{"text" : "Outside it is cold and wet"}'
+ - '{"index": {"_index": "test", "_id": "5"}}'
+ - '{"text" : "cold rain makes it wet"}'
+ - '{"index": {"_index": "test", "_id": "6"}}'
+ - '{"text" : "that is some cold cold rain"}'
---
-"Test ordered matching":
+"Test regexp":
+ - skip:
+ version: " - 1.2.99"
+ reason: "regexp introduced in 1.3"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ regexp:
+ pattern: "at[a-z]{2,}here"
+ - match: { hits.total.value: 1 }
+
+---
+"Test regexp, explicit case sensitive":
+ - skip:
+ version: " - 1.2.99"
+ reason: "case_insensitive introduced in 1.3"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ regexp:
+ pattern: "AT[a-z]{2,}HERE"
+ case_insensitive: false
+ - match: { hits.total.value: 0 }
+
+---
+"Test regexp, explicit case insensitive":
+ - skip:
+ version: " - 1.2.99"
+ reason: "case_insensitive introduced in 1.3"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ regexp:
+ pattern: "AT[a-z]{2,}HERE"
+ case_insensitive: true
+ - match: { hits.total.value: 1 }
+
+---
+"Test ordered matching with via mode":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
- do:
search:
index: test
@@ -36,7 +89,25 @@ setup:
text:
match:
query: "cold outside"
- ordered: true
+ mode: "ordered"
+ - match: { hits.total.value: 2 }
+
+---
+"Test ordered matching":
+ - skip:
+ features: allowed_warnings
+ - do:
+ allowed_warnings:
+ - "Deprecated field [ordered] used, this field is unused and will be removed entirely"
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ match:
+ query: "cold outside"
+ ordered: true
- match: { hits.total.value: 2 }
---
@@ -52,9 +123,30 @@ setup:
query: "cold outside"
- match: { hits.total.value: 3 }
+---
+"Test explicit unordered matching via mode":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ match:
+ query: "cold outside"
+ mode: "unordered"
+ - match: { hits.total.value: 3 }
+
---
"Test explicit unordered matching":
+ - skip:
+ features: allowed_warnings
- do:
+ allowed_warnings:
+ - "Deprecated field [ordered] used, this field is unused and will be removed entirely"
search:
index: test
body:
@@ -66,8 +158,45 @@ setup:
ordered: false
- match: { hits.total.value: 3 }
+---
+"Test unordered with overlap in match":
+ - skip:
+ version: " - 1.2.99"
+ reason: "Implemented in 2.0"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ match:
+ query: "cold wet it"
+ mode: "unordered"
+ - match: { hits.total.value: 3 }
+
+---
+"Test unordered with no overlap in match":
+ - skip:
+ version: " - 1.2.99"
+ reason: "Implemented in 2.0"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ match:
+ query: "cold wet it"
+ mode: "unordered_no_overlap"
+ - match: { hits.total.value: 2 }
+
---
"Test phrase matching":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
- do:
search:
index: test
@@ -77,7 +206,7 @@ setup:
text:
match:
query: "cold outside"
- ordered: true
+ mode: "ordered"
max_gaps: 0
- match: { hits.total.value: 1 }
@@ -97,6 +226,9 @@ setup:
---
"Test ordered max_gaps matching":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
- do:
search:
index: test
@@ -107,12 +239,41 @@ setup:
match:
query: "cold outside"
max_gaps: 0
- ordered: true
+ mode: "ordered"
+ - match: { hits.total.value: 1 }
+
+---
+"Test ordered combination with disjunction via mode":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ all_of:
+ intervals:
+ - any_of:
+ intervals:
+ - match:
+ query: "cold"
+ - match:
+ query: "outside"
+ - match:
+ query: "atmosphere"
+ mode: "ordered"
- match: { hits.total.value: 1 }
---
"Test ordered combination with disjunction":
+ - skip:
+ features: allowed_warnings
- do:
+ allowed_warnings:
+ - "Deprecated field [ordered] used, this field is unused and will be removed entirely"
search:
index: test
body:
@@ -134,6 +295,9 @@ setup:
---
"Test ordered combination with max_gaps":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
- do:
search:
index: test
@@ -148,11 +312,14 @@ setup:
- match:
query: "outside"
max_gaps: 0
- ordered: true
+ mode: "ordered"
- match: { hits.total.value: 1 }
---
"Test ordered combination":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
- do:
search:
index: test
@@ -166,12 +333,38 @@ setup:
query: "cold"
- match:
query: "outside"
- ordered: true
+ mode: "ordered"
+ - match: { hits.total.value: 2 }
+
+---
+"Test unordered combination via mode":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ all_of:
+ intervals:
+ - match:
+ query: "cold"
+ - match:
+ query: "outside"
+ max_gaps: 1
+ mode: "unordered"
- match: { hits.total.value: 2 }
---
"Test unordered combination":
+ - skip:
+ features: allowed_warnings
- do:
+ allowed_warnings:
+ - "Deprecated field [ordered] used, this field is unused and will be removed entirely"
search:
index: test
body:
@@ -188,8 +381,107 @@ setup:
ordered: false
- match: { hits.total.value: 2 }
+---
+"Test unordered combination with overlap":
+ - skip:
+ version: " - 1.2.99"
+ reason: "Implemented in 2.0"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ all_of:
+ intervals:
+ - match:
+ query: "cold"
+ - match:
+ query: "wet"
+ - match:
+ query: "it"
+ mode: "unordered"
+ - match: { hits.total.value: 3 }
+
+---
+"Test unordered combination no overlap":
+ - skip:
+ version: " - 1.2.99"
+ reason: "Implemented in 2.0"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ all_of:
+ intervals:
+ - match:
+ query: "cold"
+ - match:
+ query: "wet"
+ - match:
+ query: "it"
+ mode: "unordered_no_overlap"
+ - match: { hits.total.value: 2 }
+
+---
+"Test nested unordered combination with overlap":
+ - skip:
+ version: " - 1.2.99"
+ reason: "Implemented in 2.0"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ all_of:
+ intervals:
+ - any_of:
+ intervals:
+ - match:
+ query: "cold"
+ - match:
+ query: "hot"
+ - match:
+ query: "cold"
+ mode: "unordered"
+ - match: { hits.total.value: 6 }
+
+---
+"Test nested unordered combination no overlap":
+ - skip:
+ version: " - 1.2.99"
+ reason: "Implemented in 2.0"
+ - do:
+ search:
+ index: test
+ body:
+ query:
+ intervals:
+ text:
+ all_of:
+ intervals:
+ - any_of:
+ intervals:
+ - match:
+ query: "cold"
+ - match:
+ query: "hot"
+ - match:
+ query: "cold"
+ mode: "unordered_no_overlap"
+ - match: { hits.total.value: 2 }
+
---
"Test block combination":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
- do:
search:
index: test
@@ -203,13 +495,16 @@ setup:
query: "cold"
- match:
query: "outside"
- ordered: true
+ mode: "ordered"
max_gaps: 0
- match: { hits.total.value: 1 }
---
"Test containing":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
- do:
search:
index: test
@@ -223,7 +518,7 @@ setup:
query: "cold"
- match:
query: "outside"
- ordered: false
+ mode: "unordered"
filter:
containing:
match:
@@ -233,6 +528,9 @@ setup:
---
"Test not containing":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
- do:
search:
index: test
@@ -246,7 +544,7 @@ setup:
query: "cold"
- match:
query: "outside"
- ordered: false
+ mode: "unordered"
filter:
not_containing:
match:
@@ -255,6 +553,9 @@ setup:
---
"Test contained_by":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
- do:
search:
index: test
@@ -272,7 +573,7 @@ setup:
query: "cold"
- match:
query: "outside"
- ordered: false
+ mode: "unordered"
- match: { hits.total.value: 1 }
---
@@ -294,10 +595,13 @@ setup:
query: "cold"
- match:
query: "outside"
- - match: { hits.total.value: 1 }
+ - match: { hits.total.value: 2 }
---
"Test not_overlapping":
+ - skip:
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
- do:
search:
index: test
@@ -311,7 +615,7 @@ setup:
query: "cold"
- match:
query: "outside"
- ordered: true
+ mode: "ordered"
filter:
not_overlapping:
all_of:
@@ -320,14 +624,14 @@ setup:
query: "baby"
- match:
query: "there"
- ordered: false
+ mode: "unordered"
- match: { hits.total.value: 1 }
---
"Test overlapping":
- skip:
- version: " - 7.1.99"
- reason: "Implemented in 7.2"
+ version: " - 1.2.99"
+ reason: "mode introduced in 1.3"
- do:
search:
index: test
@@ -337,12 +641,12 @@ setup:
text:
match:
query: "cold outside"
- ordered: true
+ mode: "ordered"
filter:
overlapping:
match:
query: "baby there"
- ordered: false
+ mode: "unordered"
- match: { hits.total.value: 1 }
- match: { hits.hits.0._id: "3" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml
index 7657dc2bebb36..feb875e81a785 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml
@@ -1,8 +1,4 @@
setup:
- - skip:
- version: " - 6.99.99"
- reason: "Implemented in 7.0"
-
- do:
indices.create:
index: date_ns
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml
index 17735c7fd451a..1f550d114cf29 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml
@@ -64,10 +64,6 @@ setup:
---
"Docvalues_fields size limit":
-
- - skip:
- version: " - 6.99.99"
- reason: "Triggers warnings before 7.0"
- do:
catch: /Trying to retrieve too many docvalue_fields\. Must be less than or equal to[:] \[2\] but was \[3\]\. This limit can be set by changing the \[index.max_docvalue_fields_search\] index level setting\./
search:
@@ -99,10 +95,6 @@ setup:
---
"Regexp length limit":
- - skip:
- version: " - 6.99.99"
- reason: "The regex length limit was introduced in 7.0.0"
-
- do:
catch: /The length of regex \[1110\] used in the Regexp Query request has exceeded the allowed maximum of \[1000\]\. This maximum can be set by changing the \[index.max_regex_length\] index level setting\./
search:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml
index d306cb7b1ad50..e38f5f862a273 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml
@@ -31,10 +31,8 @@
- is_true: _shards.total
- is_true: hits.total
- is_true: hits.hits.0._index
- - is_true: hits.hits.0._type
- is_true: hits.hits.0._id
- is_true: hits.hits.1._index
- - is_true: hits.hits.1._type
- is_true: hits.hits.1._id
- do:
@@ -48,10 +46,8 @@
- is_false: _shards.total
- is_false: hits.total
- is_false: hits.hits.0._index
- - is_false: hits.hits.0._type
- is_false: hits.hits.0._id
- is_false: hits.hits.1._index
- - is_false: hits.hits.1._type
- is_false: hits.hits.1._id
- do:
@@ -65,10 +61,8 @@
- is_true: _shards.total
- is_false: hits.total
- is_false: hits.hits.0._index
- - is_false: hits.hits.0._type
- is_false: hits.hits.0._id
- is_false: hits.hits.1._index
- - is_false: hits.hits.1._type
- is_false: hits.hits.1._id
- do:
@@ -82,10 +76,8 @@
- is_true: _shards.total
- is_true: hits.total
- is_true: hits.hits.0._index
- - is_false: hits.hits.0._type
- is_true: hits.hits.0._id
- is_true: hits.hits.1._index
- - is_false: hits.hits.1._type
- is_true: hits.hits.1._id
---
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml
index 9f0273fbc0213..5f5d88dba7687 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml
@@ -38,7 +38,6 @@
- match: {hits.total: 3 }
- length: {hits.hits: 1 }
- match: {hits.hits.0._index: test }
- - match: {hits.hits.0._type: _doc }
- match: {hits.hits.0._id: "172" }
- match: {hits.hits.0.sort: [24, 172] }
@@ -57,7 +56,6 @@
- match: {hits.total: 3 }
- length: {hits.hits: 1 }
- match: {hits.hits.0._index: test }
- - match: {hits.hits.0._type: _doc }
- match: {hits.hits.0._id: "42" }
- match: {hits.hits.0.sort: [18, 42] }
@@ -76,7 +74,6 @@
- match: {hits.total: 3}
- length: {hits.hits: 1 }
- match: {hits.hits.0._index: test }
- - match: {hits.hits.0._type: _doc }
- match: {hits.hits.0._id: "1" }
- match: {hits.hits.0.sort: [18, 1] }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml
index fe70620c6ef62..ee831e78c74a6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml
@@ -85,10 +85,6 @@ setup:
---
"Create a snapshot for missing index":
- - skip:
- version: " - 6.0.0"
- reason: ignore_unavailable default is false in 6.0.0
-
- do:
catch: missing
snapshot.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml
index 874dda3606c4a..57a4cb55852a5 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml
@@ -96,10 +96,6 @@ setup:
---
"Get snapshot info contains include_global_state":
- - skip:
- version: " - 6.1.99"
- reason: "include_global_state field has been added in the response in 6.2.0"
-
- do:
indices.create:
index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml
index b64a51141dc6e..dfed3346726cf 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml
@@ -281,21 +281,15 @@ setup:
- length: { suggest.result.0.options: 2 }
- match: { suggest.result.0.options.0.text: "baz" }
- match: { suggest.result.0.options.0._index: "test" }
- - match: { suggest.result.0.options.0._type: "_doc" }
- match: { suggest.result.0.options.0._source.title: "title_baz" }
- match: { suggest.result.0.options.0._source.count: 3 }
- match: { suggest.result.0.options.1.text: "bar" }
- match: { suggest.result.0.options.1._index: "test" }
- - match: { suggest.result.0.options.1._type: "_doc" }
- match: { suggest.result.0.options.1._source.title: "title_bar" }
- match: { suggest.result.0.options.1._source.count: 4 }
---
"Skip duplicates should work":
- - skip:
- version: " - 6.0.99"
- reason: skip_duplicates was added in 6.1
-
- do:
index:
index: test
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml
index e2c7ccfb421e3..df415ef484b1f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml
@@ -277,10 +277,6 @@ setup:
---
"Skip duplicates with contexts should work":
- - skip:
- version: " - 6.0.99"
- reason: skip_duplicates was added in 6.1
-
- do:
index:
index: test
@@ -333,10 +329,6 @@ setup:
---
"Indexing and Querying without contexts is forbidden":
- - skip:
- version: " - 6.99.99"
- reason: this feature was removed in 7.0
-
- do:
index:
index: test
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml
index a29019183e199..bcd5fa14c87f9 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml
@@ -1,11 +1,6 @@
---
"Search by suggestion and by keyword sub-field should work":
-
- - skip:
- version: " - 6.99.99"
- reason: "Search by suggestion with multi-fields was introduced 7.0.0"
-
- do:
indices.create:
index: completion_with_sub_keyword
@@ -63,11 +58,6 @@
---
"Search by suggestion on sub field should work":
-
- - skip:
- version: " - 6.99.99"
- reason: "Search by suggestion with multi-fields was introduced 7.0.0"
-
- do:
indices.create:
index: completion_with_sub_completion
@@ -113,11 +103,6 @@
---
"Search by suggestion on sub field with context should work":
-
- - skip:
- version: " - 6.99.99"
- reason: "Search by suggestion with multi-fields was introduced 7.0.0"
-
- do:
indices.create:
index: completion_with_context
@@ -182,11 +167,6 @@
---
"Search by suggestion on sub field with weight should work":
-
- - skip:
- version: " - 6.99.99"
- reason: "Search by suggestion with multi-fields was introduced 7.0.0"
-
- do:
indices.create:
index: completion_with_weight
@@ -238,11 +218,6 @@
---
"Search by suggestion on geofield-hash on sub field should work":
-
- - skip:
- version: " - 6.99.99"
- reason: "Search by suggestion with multi-fields was introduced 7.0.0"
-
- do:
indices.create:
index: geofield_with_completion
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml
index 1742134af2b75..d0385ac0125f4 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml
@@ -23,10 +23,7 @@
---
"tasks_list headers":
- skip:
- version: " - 6.99.99"
- features: headers
- reason: task headers has been added in 7.0.0
-
+ features: headers
- do:
headers: { "X-Opaque-Id": "That is me" }
tasks.list:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/11_basic_with_types.yml
deleted file mode 100644
index 992d6db7ca786..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/11_basic_with_types.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-setup:
- - do:
- indices.create:
- include_type_name: true
- index: testidx
- body:
- mappings:
- testtype:
- "properties":
- "text":
- "type" : "text"
- "term_vector" : "with_positions_offsets"
- - do:
- index:
- index: testidx
- type: testtype
- id: testing_document
- body:
- "text" : "The quick brown fox is brown."
- - do:
- indices.refresh: {}
-
----
-"Basic tests for termvector get":
-
- - do:
- termvectors:
- index: testidx
- type: testtype
- id: testing_document
- "term_statistics" : true
-
-
- - match: {term_vectors.text.field_statistics.sum_doc_freq: 5}
- - match: {term_vectors.text.terms.brown.doc_freq: 1}
- - match: {term_vectors.text.terms.brown.tokens.0.start_offset: 10}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml
index 5f43e8a247923..44a78cadc1ada 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml
@@ -1,8 +1,3 @@
-setup:
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
---
"Term vector API should return 'found: false' for docs between index and refresh":
- do:
@@ -39,6 +34,5 @@ setup:
realtime: false
- match: { _index: "testidx" }
- - match: { _type: "_doc" }
- match: { _id: "1" }
- is_false: found
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/21_issue7121_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/21_issue7121_with_types.yml
deleted file mode 100644
index cf597bf141f61..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/21_issue7121_with_types.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-"Term vector API should return 'found: false' for docs between index and refresh":
- - do:
- indices.create:
- include_type_name: true
- index: testidx
- body:
- settings:
- index:
- translog.flush_threshold_size: "512MB"
- number_of_shards: 1
- number_of_replicas: 0
- refresh_interval: -1
- mappings:
- doc:
- properties:
- text:
- type : "text"
- term_vector : "with_positions_offsets"
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: testidx
- type: doc
- id: 1
- body:
- text : "foo bar"
-
- - do:
- termvectors:
- index: testidx
- type: doc
- id: 1
- realtime: false
-
- - match: { _index: "testidx" }
- - match: { _type: "doc" }
- - match: { _id: "1" }
- - is_false: found
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml
index 0cb6dfc06904b..1d357bb587021 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml
@@ -1,8 +1,3 @@
-setup:
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
---
"Realtime Term Vectors":
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/31_realtime_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/31_realtime_with_types.yml
deleted file mode 100644
index 26f441207ace8..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/31_realtime_with_types.yml
+++ /dev/null
@@ -1,40 +0,0 @@
----
-"Realtime Term Vectors":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index:
- refresh_interval: -1
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: test_1
- type: test
- id: 1
- body: { foo: bar }
-
- - do:
- termvectors:
- index: test_1
- type: test
- id: 1
- realtime: false
-
- - is_false: found
-
- - do:
- termvectors:
- index: test_1
- type: test
- id: 1
- realtime: true
-
- - is_true: found
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/50_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/50_mix_typeless_typeful.yml
deleted file mode 100644
index 4382442dee4dd..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/50_mix_typeless_typeful.yml
+++ /dev/null
@@ -1,46 +0,0 @@
----
-"Term vectors with typeless API on an index that has types":
-
- - skip:
- version: " - 6.99.99"
- reason: Typeless APIs were introduced in 7.0.0
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "text"
- term_vector: "with_positions"
-
- - do:
- index:
- index: index
- type: not_doc
- id: 1
- body: { foo: bar }
-
- - do:
- indices.refresh: {}
-
- - do:
- termvectors:
- index: index
- type: _doc # todo: remove when termvectors support typeless API
- id: 1
-
- - is_true: found
- - match: {_type: _doc}
- - match: {term_vectors.foo.terms.bar.term_freq: 1}
-
- - do:
- termvectors:
- index: index
- type: some_random_type
- id: 1
-
- - is_false: found
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml
index 3a35ad46f9161..4cb6710cc161c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml
@@ -1,10 +1,5 @@
---
"Partial document":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
-
- do:
index:
index: test_1
@@ -25,7 +20,6 @@
one: 3
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _id: "1" }
- match: { _version: 2 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml
index 41dba3551e64c..ffcb72027b33d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml
@@ -32,7 +32,6 @@
foo: baz
- match: { _index: foobar }
- - match: { _type: _doc }
- match: { _id: "1"}
- match: { _version: 2}
- match: { _shards.total: 1}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml
index 657c036291bd6..ff81bdfd39b26 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml
@@ -1,9 +1,5 @@
---
"Update result field":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
update:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml
index 08f3457400d4f..a97c68ba6ee3f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml
@@ -21,7 +21,6 @@
one: 3
- match: { _index: test_1 }
- - match: { _type: _doc }
- match: { _id: "1" }
- match: { _version: 2 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/14_shard_header_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/14_shard_header_with_types.yml
deleted file mode 100644
index eb2e4ff9a9117..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/14_shard_header_with_types.yml
+++ /dev/null
@@ -1,39 +0,0 @@
----
-"Update check shard header":
-
- - do:
- indices.create:
- index: foobar
- body:
- settings:
- number_of_shards: "1"
- number_of_replicas: "0"
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- index:
- index: foobar
- type: baz
- id: 1
- body: { foo: bar }
-
- - do:
- update:
- index: foobar
- type: baz
- id: 1
- body:
- doc:
- foo: baz
-
- - match: { _index: foobar }
- - match: { _type: baz }
- - match: { _id: "1"}
- - match: { _version: 2}
- - match: { _shards.total: 1}
- - match: { _shards.successful: 1}
- - match: { _shards.failed: 0}
- - is_false: _shards.pending
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml
deleted file mode 100644
index 9adada6d54b4f..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml
+++ /dev/null
@@ -1,52 +0,0 @@
----
-"Update result field":
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- body:
- doc: { foo: bar }
- doc_as_upsert: true
-
- - match: { _version: 1 }
- - match: { result: created }
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- body:
- doc: { foo: bar }
- doc_as_upsert: true
-
- - match: { _version: 1 }
- - match: { result: noop }
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- body:
- doc: { foo: bar }
- doc_as_upsert: true
- detect_noop: false
-
- - match: { _version: 2 }
- - match: { result: updated }
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- body:
- doc: { foo: baz }
- doc_as_upsert: true
- detect_noop: true
-
- - match: { _version: 3 }
- - match: { result: updated }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/16_noop.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/16_noop.yml
index bfb56541fb7eb..dfdf2a10c84fc 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/16_noop.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/16_noop.yml
@@ -6,7 +6,6 @@
- do:
index:
index: test_1
- type: test
id: 1
body: { foo: bar }
@@ -18,7 +17,6 @@
- do:
update:
index: test_1
- type: test
id: 1
body:
doc: { foo: bar }
@@ -31,7 +29,6 @@
- do:
update:
index: test_1
- type: test
id: 1
body:
doc: { foo: bar }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml
index a849eecc66629..4d03971aba252 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml
@@ -1,9 +1,5 @@
---
"Doc upsert":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
update:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml
deleted file mode 100644
index f34e030ff66a0..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml
+++ /dev/null
@@ -1,41 +0,0 @@
----
-"Doc upsert":
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- body:
- doc: { foo: bar, count: 1 }
- upsert: { foo: baz }
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
-
- - match: { _source.foo: baz }
- - is_false: _source.count
-
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- body:
- doc: { foo: bar, count: 1 }
- upsert: { foo: baz }
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
-
- - match: { _source.foo: bar }
- - match: { _source.count: 1 }
-
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml
index 5bdc3ecea75fc..c65fc5af27fcc 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml
@@ -1,9 +1,5 @@
---
"Doc as upsert":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
update:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml
deleted file mode 100644
index 7585b9f3e0b94..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml
+++ /dev/null
@@ -1,41 +0,0 @@
----
-"Doc as upsert":
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- body:
- doc: { foo: bar, count: 1 }
- doc_as_upsert: true
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
-
- - match: { _source.foo: bar }
- - match: { _source.count: 1 }
-
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- body:
- doc: { count: 2 }
- doc_as_upsert: true
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
-
- - match: { _source.foo: bar }
- - match: { _source.count: 2 }
-
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml
deleted file mode 100644
index 977db506710c7..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml
+++ /dev/null
@@ -1,58 +0,0 @@
----
-"Routing":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index:
- number_of_shards: 5
- number_of_routing_shards: 5
- number_of_replicas: 0
-
- - do:
- cluster.health:
- wait_for_status: green
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- routing: 5
- body:
- doc: { foo: baz }
- upsert: { foo: bar }
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- routing: 5
- stored_fields: _routing
-
- - match: { _routing: "5"}
-
- - do:
- catch: missing
- update:
- index: test_1
- type: test
- id: 1
- body:
- doc: { foo: baz }
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- routing: 5
- _source: foo
- body:
- doc: { foo: baz }
-
- - match: { get._source.foo: baz }
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/61_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/61_refresh_with_types.yml
deleted file mode 100644
index be2d9f9f7969e..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/61_refresh_with_types.yml
+++ /dev/null
@@ -1,115 +0,0 @@
----
-"Refresh":
-
- - do:
- indices.create:
- index: test_1
- body:
- settings:
- index.refresh_interval: -1
- number_of_replicas: 0
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- body:
- doc: { foo: baz }
- upsert: { foo: bar }
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { term: { _id: 1 }}
-
- - match: { hits.total: 0 }
-
- - do:
- update:
- index: test_1
- type: test
- id: 2
- refresh: true
- body:
- doc: { foo: baz }
- upsert: { foo: bar }
- - is_true: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { term: { _id: 2 }}
-
- - match: { hits.total: 1 }
-
----
-"When refresh url parameter is an empty string that means \"refresh immediately\"":
- - do:
- index:
- index: test_1
- type: test
- id: 1
- refresh: true
- body: { foo: bar }
- - is_true: forced_refresh
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- refresh: ""
- body:
- doc: {cat: dog}
- - is_true: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: test_1
- body:
- query: { term: { cat: dog }}
-
- - match: { hits.total: 1 }
-
----
-"refresh=wait_for waits until changes are visible in search":
- - do:
- index:
- index: update_60_refresh_1
- type: test
- id: update_60_refresh_id1
- body: { foo: bar }
- refresh: true
- - is_true: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: update_60_refresh_1
- body:
- query: { term: { _id: update_60_refresh_id1 }}
- - match: { hits.total: 1 }
-
- - do:
- update:
- index: update_60_refresh_1
- type: test
- id: update_60_refresh_id1
- refresh: wait_for
- body:
- doc: { test: asdf }
- - is_false: forced_refresh
-
- - do:
- search:
- rest_total_hits_as_int: true
- index: update_60_refresh_1
- body:
- query: { match: { test: asdf } }
- - match: { hits.total: 1 }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml
index 9e6d5a4671955..e196e03143456 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml
@@ -1,9 +1,5 @@
---
"Source filtering":
-
- - skip:
- version: " - 6.99.99"
- reason: types are required in requests before 7.0.0
- do:
update:
index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml
deleted file mode 100644
index 4bb22e6b8012e..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml
+++ /dev/null
@@ -1,19 +0,0 @@
----
-"Source filtering":
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- _source: [foo, bar]
- body:
- doc: { foo: baz }
- upsert: { foo: bar }
-
- - match: { get._source.foo: bar }
- - is_false: get._source.bar
-
-# TODO:
-#
-# - Add _routing
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml
deleted file mode 100644
index f7791d0986399..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml
+++ /dev/null
@@ -1,33 +0,0 @@
----
-"Metadata Fields":
-
- - skip:
- version: "all"
- reason: "Update doesn't return metadata fields, waiting for #3259"
-
- - do:
- indices.create:
- index: test_1
-
- - do:
- update:
- index: test_1
- type: test
- id: 1
- parent: 5
- fields: [ _routing ]
- body:
- doc: { foo: baz }
- upsert: { foo: bar }
-
- - match: { get._routing: "5" }
-
- - do:
- get:
- index: test_1
- type: test
- id: 1
- parent: 5
- stored_fields: [ _routing ]
-
-
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/90_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/90_mix_typeless_typeful.yml
deleted file mode 100644
index 0ca25e8598c24..0000000000000
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/90_mix_typeless_typeful.yml
+++ /dev/null
@@ -1,86 +0,0 @@
----
-"Update with typeless API on an index that has types":
-
- - skip:
- version: " - 6.99.99"
- reason: Typeless APIs were introduced in 7.0.0
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "keyword"
-
- - do:
- index:
- index: index
- type: not_doc
- id: 1
- body: { foo: bar }
-
- - do:
- update:
- index: index
- id: 1
- body:
- doc:
- foo: baz
-
- - do:
- get:
- index: index
- type: not_doc
- id: 1
-
- - match: { _source.foo: baz }
-
----
-"Update call that introduces new field mappings":
-
- - skip:
- version: " - 6.99.99"
- reason: Typeless APIs were introduced in 7.0.0
-
- - do:
- indices.create: # not using include_type_name: false on purpose
- include_type_name: true
- index: index
- body:
- mappings:
- not_doc:
- properties:
- foo:
- type: "keyword"
-
- - do:
- index:
- index: index
- type: not_doc
- id: 1
- body: { foo: bar }
-
- - do:
- update:
- index: index
- id: 1
- body:
- doc:
- foo: baz
- new_field: value
- - do:
- get: # using typeful API on purpose
- index: index
- type: not_doc
- id: 1
-
- - match: { _index: "index" }
- - match: { _type: "not_doc" }
- - match: { _id: "1" }
- - match: { _version: 2}
- - match: { _source.foo: baz }
- - match: { _source.new_field: value }
diff --git a/server/build.gradle b/server/build.gradle
index 44b88754312ac..aa467cd0528bf 100644
--- a/server/build.gradle
+++ b/server/build.gradle
@@ -227,7 +227,6 @@ tasks.named("thirdPartyAudit").configure {
'com.fasterxml.jackson.databind.ObjectMapper',
// from log4j
- 'com.conversantmedia.util.concurrent.DisruptorBlockingQueue',
'com.conversantmedia.util.concurrent.SpinPolicy',
'com.fasterxml.jackson.annotation.JsonInclude$Include',
'com.fasterxml.jackson.databind.DeserializationContext',
@@ -251,8 +250,6 @@ tasks.named("thirdPartyAudit").configure {
'com.fasterxml.jackson.databind.node.ObjectNode',
'org.fusesource.jansi.Ansi',
'org.fusesource.jansi.AnsiRenderer$Code',
- 'com.lmax.disruptor.BlockingWaitStrategy',
- 'com.lmax.disruptor.BusySpinWaitStrategy',
'com.lmax.disruptor.EventFactory',
'com.lmax.disruptor.EventTranslator',
'com.lmax.disruptor.EventTranslatorTwoArg',
@@ -262,10 +259,7 @@ tasks.named("thirdPartyAudit").configure {
'com.lmax.disruptor.RingBuffer',
'com.lmax.disruptor.Sequence',
'com.lmax.disruptor.SequenceReportingEventHandler',
- 'com.lmax.disruptor.SleepingWaitStrategy',
- 'com.lmax.disruptor.TimeoutBlockingWaitStrategy',
'com.lmax.disruptor.WaitStrategy',
- 'com.lmax.disruptor.YieldingWaitStrategy',
'com.lmax.disruptor.dsl.Disruptor',
'com.lmax.disruptor.dsl.ProducerType',
'javax.jms.Connection',
@@ -284,23 +278,17 @@ tasks.named("thirdPartyAudit").configure {
'javax.mail.Transport',
'javax.mail.internet.InternetAddress',
'javax.mail.internet.InternetHeaders',
- 'javax.mail.internet.MimeBodyPart',
'javax.mail.internet.MimeMessage',
'javax.mail.internet.MimeMultipart',
'javax.mail.internet.MimeUtility',
- 'javax.mail.util.ByteArrayDataSource',
'org.apache.commons.compress.compressors.CompressorStreamFactory',
'org.apache.commons.compress.utils.IOUtils',
'org.apache.commons.csv.CSVFormat',
'org.apache.commons.csv.QuoteMode',
- 'org.apache.kafka.clients.producer.KafkaProducer',
'org.apache.kafka.clients.producer.Producer',
- 'org.apache.kafka.clients.producer.ProducerRecord',
'org.apache.kafka.clients.producer.RecordMetadata',
'org.codehaus.stax2.XMLStreamWriter2',
'org.jctools.queues.MpscArrayQueue',
- 'org.osgi.framework.AdaptPermission',
- 'org.osgi.framework.AdminPermission',
'org.osgi.framework.Bundle',
'org.osgi.framework.BundleActivator',
'org.osgi.framework.BundleContext',
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java
index 666c0a87a7acb..eeee000fa9c2d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java
@@ -234,11 +234,7 @@ public void testIndex() {
String[] indexShardActions = new String[] { BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" };
interceptTransportActions(indexShardActions);
- IndexRequest indexRequest = new IndexRequest(randomIndexOrAlias(), "type", "id").source(
- Requests.INDEX_CONTENT_TYPE,
- "field",
- "value"
- );
+ IndexRequest indexRequest = new IndexRequest(randomIndexOrAlias()).id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value");
internalCluster().coordOnlyNodeClient().index(indexRequest).actionGet();
clearInterceptedActions();
@@ -249,7 +245,7 @@ public void testDelete() {
String[] deleteShardActions = new String[] { BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" };
interceptTransportActions(deleteShardActions);
- DeleteRequest deleteRequest = new DeleteRequest(randomIndexOrAlias(), "type", "id");
+ DeleteRequest deleteRequest = new DeleteRequest(randomIndexOrAlias()).id("id");
internalCluster().coordOnlyNodeClient().delete(deleteRequest).actionGet();
clearInterceptedActions();
@@ -263,7 +259,7 @@ public void testUpdate() {
String indexOrAlias = randomIndexOrAlias();
client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get();
- UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value1");
+ UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "id").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value1");
UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet();
assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
@@ -277,7 +273,7 @@ public void testUpdateUpsert() {
interceptTransportActions(updateShardActions);
String indexOrAlias = randomIndexOrAlias();
- UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").upsert(Requests.INDEX_CONTENT_TYPE, "field", "value")
+ UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "id").upsert(Requests.INDEX_CONTENT_TYPE, "field", "value")
.doc(Requests.INDEX_CONTENT_TYPE, "field1", "value1");
UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet();
assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult());
@@ -293,7 +289,7 @@ public void testUpdateDelete() {
String indexOrAlias = randomIndexOrAlias();
client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get();
- UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").script(
+ UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "id").script(
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx.op='delete'", Collections.emptyMap())
);
UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet();
@@ -312,19 +308,19 @@ public void testBulk() {
int numIndexRequests = iterations(1, 10);
for (int i = 0; i < numIndexRequests; i++) {
String indexOrAlias = randomIndexOrAlias();
- bulkRequest.add(new IndexRequest(indexOrAlias, "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"));
+ bulkRequest.add(new IndexRequest(indexOrAlias).id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"));
indices.add(indexOrAlias);
}
int numDeleteRequests = iterations(1, 10);
for (int i = 0; i < numDeleteRequests; i++) {
String indexOrAlias = randomIndexOrAlias();
- bulkRequest.add(new DeleteRequest(indexOrAlias, "type", "id"));
+ bulkRequest.add(new DeleteRequest(indexOrAlias).id("id"));
indices.add(indexOrAlias);
}
int numUpdateRequests = iterations(1, 10);
for (int i = 0; i < numUpdateRequests; i++) {
String indexOrAlias = randomIndexOrAlias();
- bulkRequest.add(new UpdateRequest(indexOrAlias, "type", "id").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value1"));
+ bulkRequest.add(new UpdateRequest(indexOrAlias, "id").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value1"));
indices.add(indexOrAlias);
}
@@ -338,7 +334,7 @@ public void testGet() {
String getShardAction = GetAction.NAME + "[s]";
interceptTransportActions(getShardAction);
- GetRequest getRequest = new GetRequest(randomIndexOrAlias(), "type", "id");
+ GetRequest getRequest = new GetRequest(randomIndexOrAlias(), "id");
internalCluster().coordOnlyNodeClient().get(getRequest).actionGet();
clearInterceptedActions();
@@ -349,7 +345,7 @@ public void testExplain() {
String explainShardAction = ExplainAction.NAME + "[s]";
interceptTransportActions(explainShardAction);
- ExplainRequest explainRequest = new ExplainRequest(randomIndexOrAlias(), "type", "id").query(QueryBuilders.matchAllQuery());
+ ExplainRequest explainRequest = new ExplainRequest(randomIndexOrAlias(), "id").query(QueryBuilders.matchAllQuery());
internalCluster().coordOnlyNodeClient().explain(explainRequest).actionGet();
clearInterceptedActions();
@@ -360,7 +356,7 @@ public void testTermVector() {
String termVectorShardAction = TermVectorsAction.NAME + "[s]";
interceptTransportActions(termVectorShardAction);
- TermVectorsRequest termVectorsRequest = new TermVectorsRequest(randomIndexOrAlias(), "type", "id");
+ TermVectorsRequest termVectorsRequest = new TermVectorsRequest(randomIndexOrAlias(), "id");
internalCluster().coordOnlyNodeClient().termVectors(termVectorsRequest).actionGet();
clearInterceptedActions();
@@ -376,7 +372,7 @@ public void testMultiTermVector() {
int numDocs = iterations(1, 30);
for (int i = 0; i < numDocs; i++) {
String indexOrAlias = randomIndexOrAlias();
- multiTermVectorsRequest.add(indexOrAlias, "type", Integer.toString(i));
+ multiTermVectorsRequest.add(indexOrAlias, Integer.toString(i));
indices.add(indexOrAlias);
}
internalCluster().coordOnlyNodeClient().multiTermVectors(multiTermVectorsRequest).actionGet();
@@ -394,7 +390,7 @@ public void testMultiGet() {
int numDocs = iterations(1, 30);
for (int i = 0; i < numDocs; i++) {
String indexOrAlias = randomIndexOrAlias();
- multiGetRequest.add(indexOrAlias, "type", Integer.toString(i));
+ multiGetRequest.add(indexOrAlias, Integer.toString(i));
indices.add(indexOrAlias);
}
internalCluster().coordOnlyNodeClient().multiGet(multiGetRequest).actionGet();
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/ListenerActionIT.java b/server/src/internalClusterTest/java/org/opensearch/action/ListenerActionIT.java
index a0ddf68355a63..1512fa4934ca1 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/ListenerActionIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/ListenerActionIT.java
@@ -48,7 +48,7 @@ public void testThreadedListeners() throws Throwable {
final AtomicReference threadName = new AtomicReference<>();
Client client = client();
- IndexRequest request = new IndexRequest("test", "type", "1");
+ IndexRequest request = new IndexRequest("test").id("1");
if (randomBoolean()) {
// set the source, without it, we will have a verification failure
request.source(Requests.INDEX_CONTENT_TYPE, "field1", "value1");
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java
index 3516c7a145aea..9c3cf4c28e55e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java
@@ -379,14 +379,12 @@ public void testSearchTaskDescriptions() {
headers.put(Task.X_OPAQUE_ID, "my_id");
headers.put("Foo-Header", "bar");
headers.put("Custom-Task-Header", "my_value");
- assertSearchResponse(
- client().filterWithHeader(headers).prepareSearch("test").setTypes("doc").setQuery(QueryBuilders.matchAllQuery()).get()
- );
+ assertSearchResponse(client().filterWithHeader(headers).prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).get());
// the search operation should produce one main task
List mainTask = findEvents(SearchAction.NAME, Tuple::v1);
assertEquals(1, mainTask.size());
- assertThat(mainTask.get(0).getDescription(), startsWith("indices[test], types[doc], search_type["));
+ assertThat(mainTask.get(0).getDescription(), startsWith("indices[test], search_type["));
assertThat(mainTask.get(0).getDescription(), containsString("\"query\":{\"match_all\""));
assertTaskHeaders(mainTask.get(0));
@@ -829,14 +827,12 @@ public void testTaskStoringSuccessfulResult() throws Exception {
assertNoFailures(client().admin().indices().prepareRefresh(TaskResultsService.TASK_INDEX).get());
SearchResponse searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX)
- .setTypes(TaskResultsService.TASK_TYPE)
.setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.action", taskInfo.getAction())))
.get();
assertEquals(1L, searchResponse.getHits().getTotalHits().value);
searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX)
- .setTypes(TaskResultsService.TASK_TYPE)
.setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.node", taskInfo.getTaskId().getNodeId())))
.get();
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java
index d87bbbb0926c5..a1ddc4a27a1f9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java
@@ -240,10 +240,8 @@ public void testShrinkIndexPrimaryTerm() throws Exception {
final String s = Integer.toString(id);
final int hash = Math.floorMod(Murmur3HashFunction.hash(s), numberOfShards);
if (hash == shardId) {
- final IndexRequest request = new IndexRequest("source", "type", s).source(
- "{ \"f\": \"" + s + "\"}",
- XContentType.JSON
- );
+ final IndexRequest request = new IndexRequest("source").id(s)
+ .source("{ \"f\": \"" + s + "\"}", XContentType.JSON);
client().index(request).get();
break;
} else {
@@ -667,7 +665,7 @@ public void testShrinkCommitsMergeOnIdle() throws Exception {
IndexService indexShards = service.indexService(target.getIndex());
IndexShard shard = indexShards.getShard(0);
assertTrue(shard.isActive());
- shard.checkIdle(0);
+ shard.flushOnIdle(0);
assertFalse(shard.isActive());
}
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java
index 86974322388ab..14d337c34daa5 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java
@@ -229,7 +229,7 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha
assertHitCount(client().prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs);
assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs);
for (int i = 0; i < numDocs; i++) {
- GetResponse getResponse = client().prepareGet("first_split", "t1", Integer.toString(i)).setRouting(routingValue[i]).get();
+ GetResponse getResponse = client().prepareGet("first_split", Integer.toString(i)).setRouting(routingValue[i]).get();
assertTrue(getResponse.isExists());
}
@@ -274,7 +274,7 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha
}
flushAndRefresh();
for (int i = 0; i < numDocs; i++) {
- GetResponse getResponse = client().prepareGet("second_split", "t1", Integer.toString(i)).setRouting(routingValue[i]).get();
+ GetResponse getResponse = client().prepareGet("second_split", Integer.toString(i)).setRouting(routingValue[i]).get();
assertTrue(getResponse.isExists());
}
assertHitCount(client().prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs);
@@ -345,10 +345,8 @@ public void testSplitIndexPrimaryTerm() throws Exception {
final String s = Integer.toString(id);
final int hash = Math.floorMod(Murmur3HashFunction.hash(s), numberOfShards);
if (hash == shardId) {
- final IndexRequest request = new IndexRequest("source", "type", s).source(
- "{ \"f\": \"" + s + "\"}",
- XContentType.JSON
- );
+ final IndexRequest request = new IndexRequest("source").id(s)
+ .source("{ \"f\": \"" + s + "\"}", XContentType.JSON);
client().index(request).get();
break;
} else {
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java
index e33b140d288ac..ab934170b594a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java
@@ -88,7 +88,6 @@ public void testBulkIndexCreatesMapping() throws Exception {
assertBusy(() -> {
GetMappingsResponse mappingsResponse = client().admin().indices().prepareGetMappings().get();
assertTrue(mappingsResponse.getMappings().containsKey("logstash-2014.03.30"));
- assertTrue(mappingsResponse.getMappings().get("logstash-2014.03.30").containsKey("logs"));
});
}
@@ -117,7 +116,7 @@ public void testBulkWithWriteIndexAndRouting() {
.setSettings(twoShardsSettings)
.get();
- IndexRequest indexRequestWithAlias = new IndexRequest("alias1", "type", "id");
+ IndexRequest indexRequestWithAlias = new IndexRequest("alias1").id("id");
if (randomBoolean()) {
indexRequestWithAlias.routing("1");
}
@@ -127,19 +126,19 @@ public void testBulkWithWriteIndexAndRouting() {
assertThat(bulkResponse.getItems()[0].getResponse().getShardId().getId(), equalTo(0));
assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(1L));
assertThat(bulkResponse.getItems()[0].getResponse().status(), equalTo(RestStatus.CREATED));
- assertThat(client().prepareGet("index3", "type", "id").setRouting("1").get().getSource().get("foo"), equalTo("baz"));
+ assertThat(client().prepareGet("index3", "id").setRouting("1").get().getSource().get("foo"), equalTo("baz"));
bulkResponse = client().prepareBulk().add(client().prepareUpdate("alias1", "type", "id").setDoc("foo", "updated")).get();
assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures());
- assertThat(client().prepareGet("index3", "type", "id").setRouting("1").get().getSource().get("foo"), equalTo("updated"));
+ assertThat(client().prepareGet("index3", "id").setRouting("1").get().getSource().get("foo"), equalTo("updated"));
bulkResponse = client().prepareBulk().add(client().prepareDelete("alias1", "type", "id")).get();
assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures());
- assertFalse(client().prepareGet("index3", "type", "id").setRouting("1").get().isExists());
+ assertFalse(client().prepareGet("index3", "id").setRouting("1").get().isExists());
}
// allowing the auto-generated timestamp to externally be set would allow making the index inconsistent with duplicate docs
public void testExternallySetAutoGeneratedTimestamp() {
- IndexRequest indexRequest = new IndexRequest("index1", "_doc").source(Collections.singletonMap("foo", "baz"));
+ IndexRequest indexRequest = new IndexRequest("index1").source(Collections.singletonMap("foo", "baz"));
indexRequest.process(Version.CURRENT, null, null); // sets the timestamp
if (randomBoolean()) {
indexRequest.id("test");
@@ -163,7 +162,7 @@ public void testBulkWithGlobalDefaults() throws Exception {
{
createSamplePipeline("pipeline");
- BulkRequestBuilder bulkBuilder = client().prepareBulk("test", "type1").routing("routing").pipeline("pipeline");
+ BulkRequestBuilder bulkBuilder = client().prepareBulk("test").routing("routing").pipeline("pipeline");
bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON);
BulkResponse bulkItemResponses = bulkBuilder.get();
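(Sketch, not part of the patch: the bulk hunks above reduce to the pattern below, where the target index or alias appears once per request and documents carry only an optional id. The name "alias1" and the field values are illustrative.)

    import org.opensearch.action.bulk.BulkResponse;
    import org.opensearch.action.index.IndexRequest;
    import org.opensearch.client.Client;

    import java.util.Collections;

    static void bulkIndexTypeless(Client client) {
        BulkResponse response = client.prepareBulk()
            // Explicit id on the first document, auto-generated id on the second.
            .add(new IndexRequest("alias1").id("id").source(Collections.singletonMap("foo", "bar")))
            .add(new IndexRequest("alias1").source(Collections.singletonMap("foo", "baz")))
            .get();
        assert response.hasFailures() == false;
    }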
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java
index 20791f46ade59..850034bc631b1 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java
@@ -40,14 +40,10 @@
import org.opensearch.client.Client;
import org.opensearch.client.Requests;
import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.Strings;
-import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.ByteSizeUnit;
import org.opensearch.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.test.OpenSearchIntegTestCase;
import java.util.Arrays;
@@ -173,7 +169,6 @@ public void testBulkProcessorConcurrentRequests() throws Exception {
for (BulkItemResponse bulkItemResponse : listener.bulkItems) {
assertThat(bulkItemResponse.getFailureMessage(), bulkItemResponse.isFailed(), equalTo(false));
assertThat(bulkItemResponse.getIndex(), equalTo("test"));
- assertThat(bulkItemResponse.getType(), equalTo("test"));
// with concurrent requests > 1 we can't rely on the order of the bulk requests
assertThat(Integer.valueOf(bulkItemResponse.getId()), both(greaterThan(0)).and(lessThanOrEqualTo(numDocs)));
// we do want to check that we don't get duplicate ids back
@@ -253,17 +248,14 @@ public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception
if (randomBoolean()) {
testDocs++;
processor.add(
- new IndexRequest("test", "test", Integer.toString(testDocs)).source(Requests.INDEX_CONTENT_TYPE, "field", "value")
+ new IndexRequest("test").id(Integer.toString(testDocs)).source(Requests.INDEX_CONTENT_TYPE, "field", "value")
);
- multiGetRequestBuilder.add("test", "test", Integer.toString(testDocs));
+ multiGetRequestBuilder.add("test", Integer.toString(testDocs));
} else {
testReadOnlyDocs++;
processor.add(
- new IndexRequest("test-ro", "test", Integer.toString(testReadOnlyDocs)).source(
- Requests.INDEX_CONTENT_TYPE,
- "field",
- "value"
- )
+ new IndexRequest("test-ro").id(Integer.toString(testReadOnlyDocs))
+ .source(Requests.INDEX_CONTENT_TYPE, "field", "value")
);
}
}
@@ -280,7 +272,6 @@ public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception
Set<String> readOnlyIds = new HashSet<>();
for (BulkItemResponse bulkItemResponse : listener.bulkItems) {
assertThat(bulkItemResponse.getIndex(), either(equalTo("test")).or(equalTo("test-ro")));
- assertThat(bulkItemResponse.getType(), equalTo("test"));
if (bulkItemResponse.getIndex().equals("test")) {
assertThat(bulkItemResponse.isFailed(), equalTo(false));
// with concurrent requests > 1 we can't rely on the order of the bulk requests
@@ -302,25 +293,11 @@ public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception
private static MultiGetRequestBuilder indexDocs(Client client, BulkProcessor processor, int numDocs) throws Exception {
MultiGetRequestBuilder multiGetRequestBuilder = client.prepareMultiGet();
for (int i = 1; i <= numDocs; i++) {
- if (randomBoolean()) {
- processor.add(
- new IndexRequest("test", "test", Integer.toString(i)).source(
- Requests.INDEX_CONTENT_TYPE,
- "field",
- randomRealisticUnicodeOfLengthBetween(1, 30)
- )
- );
- } else {
- final String source = "{ \"index\":{\"_index\":\"test\",\"_type\":\"test\",\"_id\":\""
- + Integer.toString(i)
- + "\"} }\n"
- + Strings.toString(
- JsonXContent.contentBuilder().startObject().field("field", randomRealisticUnicodeOfLengthBetween(1, 30)).endObject()
- )
- + "\n";
- processor.add(new BytesArray(source), null, null, XContentType.JSON);
- }
- multiGetRequestBuilder.add("test", "test", Integer.toString(i));
+ processor.add(
+ new IndexRequest("test").id(Integer.toString(i))
+ .source(Requests.INDEX_CONTENT_TYPE, "field", randomRealisticUnicodeOfLengthBetween(1, 30))
+ );
+ multiGetRequestBuilder.add("test", Integer.toString(i));
}
return multiGetRequestBuilder;
}
@@ -330,7 +307,6 @@ private static void assertResponseItems(List<BulkItemResponse> bulkItemResponses
int i = 1;
for (BulkItemResponse bulkItemResponse : bulkItemResponses) {
assertThat(bulkItemResponse.getIndex(), equalTo("test"));
- assertThat(bulkItemResponse.getType(), equalTo("test"));
assertThat(bulkItemResponse.getId(), equalTo(Integer.toString(i++)));
assertThat(
"item " + i + " failed with cause: " + bulkItemResponse.getFailureMessage(),
@@ -345,7 +321,6 @@ private static void assertMultiGetResponse(MultiGetResponse multiGetResponse, in
int i = 1;
for (MultiGetItemResponse multiGetItemResponse : multiGetResponse) {
assertThat(multiGetItemResponse.getIndex(), equalTo("test"));
- assertThat(multiGetItemResponse.getType(), equalTo("test"));
assertThat(multiGetItemResponse.getId(), equalTo(Integer.toString(i++)));
}
}
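(Sketch, not part of the patch: the BulkProcessor and multi-get call sites above now address documents by index and id alone. The processor is assumed to be already built; the index name "test" and the field value are illustrative.)

    import org.opensearch.action.bulk.BulkProcessor;
    import org.opensearch.action.get.MultiGetRequestBuilder;
    import org.opensearch.action.index.IndexRequest;
    import org.opensearch.client.Client;
    import org.opensearch.client.Requests;

    static MultiGetRequestBuilder queueDocs(Client client, BulkProcessor processor, int numDocs) {
        MultiGetRequestBuilder multiGet = client.prepareMultiGet();
        for (int i = 1; i <= numDocs; i++) {
            // Typeless IndexRequest: index name in the constructor, id via the builder.
            processor.add(new IndexRequest("test").id(Integer.toString(i)).source(Requests.INDEX_CONTENT_TYPE, "field", "value-" + i));
            // Multi-get items are likewise keyed by index and id only.
            multiGet.add("test", Integer.toString(i));
        }
        return multiGet;
    }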
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java
index bcda78ed6f788..68cb46fd20e50 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java
@@ -159,11 +159,7 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure)
client().admin().indices().refresh(new RefreshRequest()).get();
- SearchResponse results = client().prepareSearch(INDEX_NAME)
- .setTypes(TYPE_NAME)
- .setQuery(QueryBuilders.matchAllQuery())
- .setSize(0)
- .get();
+ SearchResponse results = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get();
if (rejectedExecutionExpected) {
assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps));
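(Sketch, not part of the patch: with setTypes gone, counting documents after indexing is just a match-all search with size 0; the index name is passed in here for illustration.)

    import org.opensearch.action.search.SearchResponse;
    import org.opensearch.client.Client;
    import org.opensearch.index.query.QueryBuilders;

    static long countDocs(Client client, String indexName) {
        // setSize(0) makes this a pure count; there is no per-type filter to apply anymore.
        SearchResponse response = client.prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get();
        return response.getHits().getTotalHits().value;
    }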
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
index 570d1055a7a6c..f2b83fc92cc63 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
@@ -177,17 +177,17 @@ public void testBulkUpdateSimple() throws Exception {
assertThat(bulkResponse.getItems()[2].getResponse().getId(), equalTo("3"));
assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(2L));
- GetResponse getResponse = client().prepareGet().setIndex("test").setType("type1").setId("1").execute().actionGet();
+ GetResponse getResponse = client().prepareGet().setIndex("test").setId("1").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getVersion(), equalTo(2L));
assertThat(((Number) getResponse.getSource().get("field")).longValue(), equalTo(2L));
- getResponse = client().prepareGet().setIndex("test").setType("type1").setId("2").execute().actionGet();
+ getResponse = client().prepareGet().setIndex("test").setId("2").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getVersion(), equalTo(2L));
assertThat(((Number) getResponse.getSource().get("field")).longValue(), equalTo(3L));
- getResponse = client().prepareGet().setIndex("test").setType("type1").setId("3").execute().actionGet();
+ getResponse = client().prepareGet().setIndex("test").setId("3").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getVersion(), equalTo(2L));
assertThat(getResponse.getSource().get("field1").toString(), equalTo("test"));
@@ -217,15 +217,15 @@ public void testBulkUpdateSimple() throws Exception {
assertThat(bulkResponse.getItems()[2].getResponse().getIndex(), equalTo("test"));
assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(3L));
- getResponse = client().prepareGet().setIndex("test").setType("type1").setId("6").execute().actionGet();
+ getResponse = client().prepareGet().setIndex("test").setId("6").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getVersion(), equalTo(1L));
assertThat(((Number) getResponse.getSource().get("field")).longValue(), equalTo(0L));
- getResponse = client().prepareGet().setIndex("test").setType("type1").setId("7").execute().actionGet();
+ getResponse = client().prepareGet().setIndex("test").setId("7").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(false));
- getResponse = client().prepareGet().setIndex("test").setType("type1").setId("2").execute().actionGet();
+ getResponse = client().prepareGet().setIndex("test").setId("2").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getVersion(), equalTo(3L));
assertThat(((Number) getResponse.getSource().get("field")).longValue(), equalTo(4L));
@@ -440,14 +440,13 @@ public void testBulkUpdateLargerVolume() throws Exception {
assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(i)));
assertThat(response.getItems()[i].getVersion(), equalTo(1L));
assertThat(response.getItems()[i].getIndex(), equalTo("test"));
- assertThat(response.getItems()[i].getType(), equalTo("type1"));
assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE));
assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i)));
assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(1L));
assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().sourceAsMap().get("counter"), equalTo(1));
for (int j = 0; j < 5; j++) {
- GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).execute().actionGet();
+ GetResponse getResponse = client().prepareGet("test", Integer.toString(i)).execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getVersion(), equalTo(1L));
assertThat(((Number) getResponse.getSource().get("counter")).longValue(), equalTo(1L));
@@ -480,7 +479,6 @@ public void testBulkUpdateLargerVolume() throws Exception {
assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(i)));
assertThat(response.getItems()[i].getVersion(), equalTo(2L));
assertThat(response.getItems()[i].getIndex(), equalTo("test"));
- assertThat(response.getItems()[i].getType(), equalTo("type1"));
assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE));
assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i)));
assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(2L));
@@ -504,7 +502,6 @@ public void testBulkUpdateLargerVolume() throws Exception {
assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(id)));
assertThat(response.getItems()[i].getVersion(), equalTo(3L));
assertThat(response.getItems()[i].getIndex(), equalTo("test"));
- assertThat(response.getItems()[i].getType(), equalTo("type1"));
assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE));
}
}
@@ -526,7 +523,6 @@ public void testBulkUpdateLargerVolume() throws Exception {
assertThat(response.getItems()[i].getItemId(), equalTo(i));
assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(i)));
assertThat(response.getItems()[i].getIndex(), equalTo("test"));
- assertThat(response.getItems()[i].getType(), equalTo("type1"));
assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE));
}
@@ -550,10 +546,9 @@ public void testBulkUpdateLargerVolume() throws Exception {
assertThat(itemResponse.getItemId(), equalTo(i));
assertThat(itemResponse.getId(), equalTo(Integer.toString(i)));
assertThat(itemResponse.getIndex(), equalTo("test"));
- assertThat(itemResponse.getType(), equalTo("type1"));
assertThat(itemResponse.getOpType(), equalTo(OpType.UPDATE));
for (int j = 0; j < 5; j++) {
- GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).get();
+ GetResponse getResponse = client().prepareGet("test", Integer.toString(i)).get();
assertThat(getResponse.isExists(), equalTo(false));
}
}
@@ -661,21 +656,21 @@ public void testThatInvalidIndexNamesShouldNotBreakCompleteBulkRequest() {
// issue 6630
public void testThatFailedUpdateRequestReturnsCorrectType() throws Exception {
BulkResponse indexBulkItemResponse = client().prepareBulk()
- .add(new IndexRequest("test", "type", "3").source("{ \"title\" : \"Great Title of doc 3\" }", XContentType.JSON))
- .add(new IndexRequest("test", "type", "4").source("{ \"title\" : \"Great Title of doc 4\" }", XContentType.JSON))
- .add(new IndexRequest("test", "type", "5").source("{ \"title\" : \"Great Title of doc 5\" }", XContentType.JSON))
- .add(new IndexRequest("test", "type", "6").source("{ \"title\" : \"Great Title of doc 6\" }", XContentType.JSON))
+ .add(new IndexRequest("test").id("3").source("{ \"title\" : \"Great Title of doc 3\" }", XContentType.JSON))
+ .add(new IndexRequest("test").id("4").source("{ \"title\" : \"Great Title of doc 4\" }", XContentType.JSON))
+ .add(new IndexRequest("test").id("5").source("{ \"title\" : \"Great Title of doc 5\" }", XContentType.JSON))
+ .add(new IndexRequest("test").id("6").source("{ \"title\" : \"Great Title of doc 6\" }", XContentType.JSON))
.setRefreshPolicy(RefreshPolicy.IMMEDIATE)
.get();
assertNoFailures(indexBulkItemResponse);
BulkResponse bulkItemResponse = client().prepareBulk()
- .add(new IndexRequest("test", "type", "1").source("{ \"title\" : \"Great Title of doc 1\" }", XContentType.JSON))
- .add(new IndexRequest("test", "type", "2").source("{ \"title\" : \"Great Title of doc 2\" }", XContentType.JSON))
- .add(new UpdateRequest("test", "type", "3").doc("{ \"date\" : \"2014-01-30T23:59:57\"}", XContentType.JSON))
- .add(new UpdateRequest("test", "type", "4").doc("{ \"date\" : \"2014-13-30T23:59:57\"}", XContentType.JSON))
- .add(new DeleteRequest("test", "type", "5"))
- .add(new DeleteRequest("test", "type", "6"))
+ .add(new IndexRequest("test").id("1").source("{ \"title\" : \"Great Title of doc 1\" }", XContentType.JSON))
+ .add(new IndexRequest("test").id("2").source("{ \"title\" : \"Great Title of doc 2\" }", XContentType.JSON))
+ .add(new UpdateRequest("test", "3").doc("{ \"date\" : \"2014-01-30T23:59:57\"}", XContentType.JSON))
+ .add(new UpdateRequest("test", "4").doc("{ \"date\" : \"2014-13-30T23:59:57\"}", XContentType.JSON))
+ .add(new DeleteRequest("test", "5"))
+ .add(new DeleteRequest("test", "6"))
.get();
assertNoFailures(indexBulkItemResponse);
@@ -696,11 +691,11 @@ private static String indexOrAlias() {
public void testThatMissingIndexDoesNotAbortFullBulkRequest() throws Exception {
createIndex("bulkindex1", "bulkindex2");
BulkRequest bulkRequest = new BulkRequest();
- bulkRequest.add(new IndexRequest("bulkindex1", "index1_type", "1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1"))
- .add(new IndexRequest("bulkindex2", "index2_type", "1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2"))
- .add(new IndexRequest("bulkindex2", "index2_type").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2"))
- .add(new UpdateRequest("bulkindex2", "index2_type", "2").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar"))
- .add(new DeleteRequest("bulkindex2", "index2_type", "3"))
+ bulkRequest.add(new IndexRequest("bulkindex1").id("1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1"))
+ .add(new IndexRequest("bulkindex2").id("1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2"))
+ .add(new IndexRequest("bulkindex2").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2"))
+ .add(new UpdateRequest("bulkindex2", "2").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar"))
+ .add(new DeleteRequest("bulkindex2", "3"))
.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
client().bulk(bulkRequest).get();
@@ -710,11 +705,11 @@ public void testThatMissingIndexDoesNotAbortFullBulkRequest() throws Exception {
assertBusy(() -> assertAcked(client().admin().indices().prepareClose("bulkindex2")));
BulkRequest bulkRequest2 = new BulkRequest();
- bulkRequest2.add(new IndexRequest("bulkindex1", "index1_type", "1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1"))
- .add(new IndexRequest("bulkindex2", "index2_type", "1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2"))
- .add(new IndexRequest("bulkindex2", "index2_type").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2"))
- .add(new UpdateRequest("bulkindex2", "index2_type", "2").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar"))
- .add(new DeleteRequest("bulkindex2", "index2_type", "3"))
+ bulkRequest2.add(new IndexRequest("bulkindex1").id("1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1"))
+ .add(new IndexRequest("bulkindex2").id("1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2"))
+ .add(new IndexRequest("bulkindex2").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2"))
+ .add(new UpdateRequest("bulkindex2", "2").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar"))
+ .add(new DeleteRequest("bulkindex2", "3"))
.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
BulkResponse bulkResponse = client().bulk(bulkRequest2).get();
@@ -730,9 +725,9 @@ public void testFailedRequestsOnClosedIndex() throws Exception {
assertBusy(() -> assertAcked(client().admin().indices().prepareClose("bulkindex1")));
BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(RefreshPolicy.IMMEDIATE);
- bulkRequest.add(new IndexRequest("bulkindex1", "index1_type", "1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1"))
- .add(new UpdateRequest("bulkindex1", "index1_type", "1").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar"))
- .add(new DeleteRequest("bulkindex1", "index1_type", "1"));
+ bulkRequest.add(new IndexRequest("bulkindex1").id("1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1"))
+ .add(new UpdateRequest("bulkindex1", "1").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar"))
+ .add(new DeleteRequest("bulkindex1", "1"));
BulkResponse bulkResponse = client().bulk(bulkRequest).get();
assertThat(bulkResponse.hasFailures(), is(true));
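(Sketch, not part of the patch: a mixed bulk request after this change carries only index names and ids across index, update, and delete operations. The index name, ids, and payloads are illustrative.)

    import org.opensearch.action.bulk.BulkRequest;
    import org.opensearch.action.delete.DeleteRequest;
    import org.opensearch.action.index.IndexRequest;
    import org.opensearch.action.update.UpdateRequest;
    import org.opensearch.client.Client;
    import org.opensearch.common.xcontent.XContentType;

    static void mixedBulk(Client client) throws Exception {
        BulkRequest bulk = new BulkRequest()
            .add(new IndexRequest("test").id("1").source("{ \"title\" : \"doc 1\" }", XContentType.JSON))
            .add(new UpdateRequest("test", "1").doc("{ \"title\" : \"doc 1, updated\" }", XContentType.JSON))
            .add(new DeleteRequest("test", "2"));
        client.bulk(bulk).get();
    }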
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java
index ac2be1a15c43e..52333061f3e6b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java
@@ -95,7 +95,7 @@ public void testNoSuchDoc() throws Exception {
client().prepareIndex("test", "type1", "666").setSource("field", "foo bar").execute().actionGet();
refresh();
for (int i = 0; i < 20; i++) {
- ActionFuture<TermVectorsResponse> termVector = client().termVectors(new TermVectorsRequest(indexOrAlias(), "type1", "" + i));
+ ActionFuture<TermVectorsResponse> termVector = client().termVectors(new TermVectorsRequest(indexOrAlias(), "" + i));
TermVectorsResponse actionGet = termVector.actionGet();
assertThat(actionGet, notNullValue());
assertThat(actionGet.getIndex(), equalTo("test"));
@@ -122,7 +122,7 @@ public void testExistingFieldWithNoTermVectorsNoNPE() throws Exception {
client().prepareIndex("test", "type1", "0").setSource("existingfield", "?").execute().actionGet();
refresh();
ActionFuture<TermVectorsResponse> termVector = client().termVectors(
- new TermVectorsRequest(indexOrAlias(), "type1", "0").selectedFields(new String[] { "existingfield" })
+ new TermVectorsRequest(indexOrAlias(), "0").selectedFields(new String[] { "existingfield" })
);
// lets see if the null term vectors are caught...
@@ -150,7 +150,7 @@ public void testExistingFieldButNotInDocNPE() throws Exception {
client().prepareIndex("test", "type1", "0").setSource("anotherexistingfield", 1).execute().actionGet();
refresh();
ActionFuture<TermVectorsResponse> termVectors = client().termVectors(
- new TermVectorsRequest(indexOrAlias(), "type1", "0").selectedFields(randomBoolean() ? new String[] { "existingfield" } : null)
+ new TermVectorsRequest(indexOrAlias(), "0").selectedFields(randomBoolean() ? new String[] { "existingfield" } : null)
.termStatistics(true)
.fieldStatistics(true)
);
@@ -191,9 +191,7 @@ public void testNotIndexedField() throws Exception {
indexRandom(true, indexBuilders);
for (int i = 0; i < 4; i++) {
- TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), "type1", String.valueOf(i))
- .setSelectedFields("field" + i)
- .get();
+ TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), String.valueOf(i)).setSelectedFields("field" + i).get();
assertThat(resp, notNullValue());
assertThat(resp.isExists(), equalTo(true));
assertThat(resp.getIndex(), equalTo("test"));
@@ -201,9 +199,7 @@ public void testNotIndexedField() throws Exception {
}
for (int i = 4; i < 6; i++) {
- TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), "type1", String.valueOf(i))
- .setSelectedFields("field" + i)
- .get();
+ TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), String.valueOf(i)).setSelectedFields("field" + i).get();
assertThat(resp.getIndex(), equalTo("test"));
assertThat("field" + i + " :", resp.getFields().terms("field" + i), notNullValue());
}
@@ -245,7 +241,7 @@ public void testSimpleTermVectors() throws IOException {
refresh();
}
for (int i = 0; i < 10; i++) {
- TermVectorsRequestBuilder resp = client().prepareTermVectors(indexOrAlias(), "type1", Integer.toString(i))
+ TermVectorsRequestBuilder resp = client().prepareTermVectors(indexOrAlias(), Integer.toString(i))
.setPayloads(true)
.setOffsets(true)
.setPositions(true)
@@ -362,7 +358,7 @@ public void testRandomSingleTermVectors() throws IOException {
boolean isPositionsRequested = randomBoolean();
String infoString = createInfoString(isPositionsRequested, isOffsetRequested, optionString);
for (int i = 0; i < 10; i++) {
- TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "_doc", Integer.toString(i))
+ TermVectorsRequestBuilder resp = client().prepareTermVectors("test", Integer.toString(i))
.setOffsets(isOffsetRequested)
.setPositions(isPositionsRequested)
.setSelectedFields();
@@ -501,7 +497,7 @@ public void testSimpleTermVectorsWithGenerate() throws IOException {
}
for (int i = 0; i < 10; i++) {
- TermVectorsResponse response = client().prepareTermVectors("test", "type1", Integer.toString(i))
+ TermVectorsResponse response = client().prepareTermVectors("test", Integer.toString(i))
.setPayloads(true)
.setOffsets(true)
.setPositions(true)
@@ -590,7 +586,7 @@ public void testDuelWithAndWithoutTermVectors() throws IOException, ExecutionExc
for (int id = 0; id < content.length; id++) {
Fields[] fields = new Fields[2];
for (int j = 0; j < indexNames.length; j++) {
- TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], "type1", String.valueOf(id))
+ TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], String.valueOf(id))
.setOffsets(true)
.setPositions(true)
.setSelectedFields("field1")
@@ -661,7 +657,7 @@ public void testSimpleWildCards() throws IOException {
client().prepareIndex("test", "type1", "0").setSource(source).get();
refresh();
- TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "type1", "0").setSelectedFields("field*").get();
+ TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "0").setSelectedFields("field*").get();
assertThat("Doc doesn't exists but should", response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat("All term vectors should have been generated", response.getFields().size(), equalTo(numFields));
@@ -692,7 +688,7 @@ public void testArtificialVsExisting() throws ExecutionException, InterruptedExc
for (int i = 0; i < content.length; i++) {
// request tvs from existing document
- TermVectorsResponse respExisting = client().prepareTermVectors("test", "type1", String.valueOf(i))
+ TermVectorsResponse respExisting = client().prepareTermVectors("test", String.valueOf(i))
.setOffsets(true)
.setPositions(true)
.setFieldStatistics(true)
@@ -703,7 +699,6 @@ public void testArtificialVsExisting() throws ExecutionException, InterruptedExc
// request tvs from artificial document
TermVectorsResponse respArtificial = client().prepareTermVectors()
.setIndex("test")
- .setType("type1")
.setRouting(String.valueOf(i)) // ensure we get the stats from the same shard as existing doc
.setDoc(jsonBuilder().startObject().field("field1", content[i]).endObject())
.setOffsets(true)
@@ -728,7 +723,6 @@ public void testArtificialNoDoc() throws IOException {
String text = "the quick brown fox jumps over the lazy dog";
TermVectorsResponse resp = client().prepareTermVectors()
.setIndex("test")
- .setType("type1")
.setDoc(jsonBuilder().startObject().field("field1", text).endObject())
.setOffsets(true)
.setPositions(true)
@@ -798,15 +792,13 @@ public void testPerFieldAnalyzer() throws IOException {
}
// selected fields not specified
- TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "type1", "0")
- .setPerFieldAnalyzer(perFieldAnalyzer)
- .get();
+ TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "0").setPerFieldAnalyzer(perFieldAnalyzer).get();
// should return all fields that have terms vectors, some with overridden analyzer
checkAnalyzedFields(response.getFields(), withTermVectors, perFieldAnalyzer);
// selected fields specified including some not in the mapping
- response = client().prepareTermVectors(indexOrAlias(), "type1", "0")
+ response = client().prepareTermVectors(indexOrAlias(), "0")
.setSelectedFields(selectedFields.toArray(Strings.EMPTY_ARRAY))
.setPerFieldAnalyzer(perFieldAnalyzer)
.get();
@@ -848,7 +840,7 @@ public void testTermVectorsWithVersion() {
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1)));
ensureGreen();
- TermVectorsResponse response = client().prepareTermVectors("test", "type1", "1").get();
+ TermVectorsResponse response = client().prepareTermVectors("test", "1").get();
assertThat(response.isExists(), equalTo(false));
logger.info("--> index doc 1");
@@ -857,18 +849,18 @@ public void testTermVectorsWithVersion() {
// From translog:
// version 0 means ignore version, which is the default
- response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
+ response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getVersion(), equalTo(1L));
- response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(1).get();
+ response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(1).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getVersion(), equalTo(1L));
try {
- client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get();
+ client().prepareGet(indexOrAlias(), "1").setVersion(2).get();
fail();
} catch (VersionConflictEngineException e) {
// all good
@@ -878,20 +870,20 @@ public void testTermVectorsWithVersion() {
refresh();
// version 0 means ignore version, which is the default
- response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
+ response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getVersion(), equalTo(1L));
- response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
+ response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(1).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getVersion(), equalTo(1L));
try {
- client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
+ client().prepareGet(indexOrAlias(), "1").setVersion(2).setRealtime(false).get();
fail();
} catch (VersionConflictEngineException e) {
// all good
@@ -903,20 +895,20 @@ public void testTermVectorsWithVersion() {
// From translog:
// version 0 means ignore version, which is the default
- response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
+ response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getVersion(), equalTo(2L));
try {
- client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get();
+ client().prepareGet(indexOrAlias(), "1").setVersion(1).get();
fail();
} catch (VersionConflictEngineException e) {
// all good
}
- response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(2).get();
+ response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(2).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
@@ -926,20 +918,20 @@ public void testTermVectorsWithVersion() {
refresh();
// version 0 means ignore version, which is the default
- response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
+ response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getVersion(), equalTo(2L));
try {
- client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
+ client().prepareGet(indexOrAlias(), "1").setVersion(1).setRealtime(false).get();
fail();
} catch (VersionConflictEngineException e) {
// all good
}
- response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
+ response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(2).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
@@ -969,7 +961,7 @@ public void testFilterLength() throws ExecutionException, InterruptedException,
TermVectorsResponse response;
for (int i = 0; i < numTerms; i++) {
filterSettings.minWordLength = numTerms - i;
- response = client().prepareTermVectors("test", "type1", "1")
+ response = client().prepareTermVectors("test", "1")
.setSelectedFields("tags")
.setFieldStatistics(true)
.setTermStatistics(true)
@@ -1004,7 +996,7 @@ public void testFilterTermFreq() throws ExecutionException, InterruptedException
TermVectorsResponse response;
for (int i = 0; i < numTerms; i++) {
filterSettings.maxNumTerms = i + 1;
- response = client().prepareTermVectors("test", "type1", "1")
+ response = client().prepareTermVectors("test", "1")
.setSelectedFields("tags")
.setFieldStatistics(true)
.setTermStatistics(true)
@@ -1037,7 +1029,7 @@ public void testFilterDocFreq() throws ExecutionException, InterruptedException,
TermVectorsResponse response;
for (int i = 0; i < numDocs; i++) {
filterSettings.maxNumTerms = i + 1;
- response = client().prepareTermVectors("test", "type1", (numDocs - 1) + "")
+ response = client().prepareTermVectors("test", (numDocs - 1) + "")
.setSelectedFields("tags")
.setFieldStatistics(true)
.setTermStatistics(true)
@@ -1068,7 +1060,6 @@ public void testArtificialDocWithPreference() throws InterruptedException, IOExc
for (Integer shardId : shardIds) {
TermVectorsResponse tvResponse = client().prepareTermVectors()
.setIndex("test")
- .setType("type1")
.setPreference("_shards:" + shardId)
.setDoc(jsonBuilder().startObject().field("field1", "random permutation").endObject())
.setFieldStatistics(true)
@@ -1132,7 +1123,7 @@ public void testWithKeywordAndNormalizer() throws IOException, ExecutionExceptio
for (int id = 0; id < content.length; id++) {
Fields[] fields = new Fields[2];
for (int j = 0; j < indexNames.length; j++) {
- TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], "type1", String.valueOf(id))
+ TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], String.valueOf(id))
.setOffsets(true)
.setPositions(true)
.setSelectedFields("field1", "field2")
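(Sketch, not part of the patch: term-vector lookups shrink to index and id, whether built from a request object or the builder; the field name "field1" is illustrative.)

    import org.opensearch.action.termvectors.TermVectorsRequest;
    import org.opensearch.action.termvectors.TermVectorsResponse;
    import org.opensearch.client.Client;

    static TermVectorsResponse termVectorsFor(Client client, String index, String id) {
        // Request-object style.
        TermVectorsRequest request = new TermVectorsRequest(index, id).selectedFields("field1");
        TermVectorsResponse viaRequest = client.termVectors(request).actionGet();
        if (viaRequest.isExists() == false) {
            return viaRequest;
        }
        // Builder style, equivalent but chainable.
        return client.prepareTermVectors(index, id).setSelectedFields("field1").setPositions(true).setOffsets(true).get();
    }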
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java
index da9d7876223a9..1228ec85c2b08 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java
@@ -83,7 +83,7 @@ public void testDuelESLucene() throws Exception {
}
public void testMissingIndexThrowsMissingIndex() throws Exception {
- TermVectorsRequestBuilder requestBuilder = client().prepareTermVectors("testX", "typeX", Integer.toString(1));
+ TermVectorsRequestBuilder requestBuilder = client().prepareTermVectors("testX", Integer.toString(1));
MultiTermVectorsRequestBuilder mtvBuilder = client().prepareMultiTermVectors();
mtvBuilder.add(requestBuilder.request());
MultiTermVectorsResponse response = mtvBuilder.execute().actionGet();
@@ -96,7 +96,7 @@ public void testMultiTermVectorsWithVersion() throws Exception {
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1)));
ensureGreen();
- MultiTermVectorsResponse response = client().prepareMultiTermVectors().add(indexOrAlias(), "type1", "1").get();
+ MultiTermVectorsResponse response = client().prepareMultiTermVectors().add(indexOrAlias(), "1").get();
assertThat(response.getResponses().length, equalTo(1));
assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false));
@@ -106,9 +106,9 @@ public void testMultiTermVectorsWithVersion() throws Exception {
// Version from translog
response = client().prepareMultiTermVectors()
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(Versions.MATCH_ANY))
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(1))
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(2))
+ .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(Versions.MATCH_ANY))
+ .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(1))
+ .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(2))
.get();
assertThat(response.getResponses().length, equalTo(3));
// [0] version doesn't matter, which is the default
@@ -130,9 +130,9 @@ public void testMultiTermVectorsWithVersion() throws Exception {
// Version from Lucene index
refresh();
response = client().prepareMultiTermVectors()
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(Versions.MATCH_ANY).realtime(false))
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(1).realtime(false))
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(2).realtime(false))
+ .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(Versions.MATCH_ANY).realtime(false))
+ .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(1).realtime(false))
+ .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(2).realtime(false))
.get();
assertThat(response.getResponses().length, equalTo(3));
// [0] version doesn't matter, which is the default
@@ -155,9 +155,9 @@ public void testMultiTermVectorsWithVersion() throws Exception {
// Version from translog
response = client().prepareMultiTermVectors()
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(Versions.MATCH_ANY))
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(1))
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(2))
+ .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(Versions.MATCH_ANY))
+ .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(1))
+ .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(2))
.get();
assertThat(response.getResponses().length, equalTo(3));
// [0] version doesn't matter, which is the default
@@ -180,9 +180,9 @@ public void testMultiTermVectorsWithVersion() throws Exception {
// Version from Lucene index
refresh();
response = client().prepareMultiTermVectors()
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(Versions.MATCH_ANY))
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(1))
- .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(2))
+ .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(Versions.MATCH_ANY))
+ .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(1))
+ .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(2))
.get();
assertThat(response.getResponses().length, equalTo(3));
// [0] version doesn't matter, which is the default
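(Sketch, not part of the patch: multi-term-vector requests drop the type the same way, and per-item versions still apply. The Versions import path is assumed from the constant used above; the field name and version numbers are illustrative.)

    import org.opensearch.action.termvectors.MultiTermVectorsResponse;
    import org.opensearch.action.termvectors.TermVectorsRequest;
    import org.opensearch.client.Client;
    import org.opensearch.common.lucene.uid.Versions;

    static MultiTermVectorsResponse multiTermVectors(Client client, String index, String id) {
        return client.prepareMultiTermVectors()
            .add(new TermVectorsRequest(index, id).selectedFields("field").version(Versions.MATCH_ANY))
            .add(new TermVectorsRequest(index, id).selectedFields("field").version(1))
            .get();
    }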
diff --git a/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java
index fa2ebe3fa2108..541fe495ee8e8 100644
--- a/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java
@@ -117,7 +117,7 @@ public void testAliases() throws Exception {
logger.info("--> indexing against [alias1], should fail now");
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
- () -> client().index(indexRequest("alias1").type("type1").id("1").source(source("2", "test"), XContentType.JSON)).actionGet()
+ () -> client().index(indexRequest("alias1").id("1").source(source("2", "test"), XContentType.JSON)).actionGet()
);
assertThat(
exception.getMessage(),
@@ -134,9 +134,8 @@ public void testAliases() throws Exception {
});
logger.info("--> indexing against [alias1], should work now");
- IndexResponse indexResponse = client().index(
- indexRequest("alias1").type("type1").id("1").source(source("1", "test"), XContentType.JSON)
- ).actionGet();
+ IndexResponse indexResponse = client().index(indexRequest("alias1").id("1").source(source("1", "test"), XContentType.JSON))
+ .actionGet();
assertThat(indexResponse.getIndex(), equalTo("test"));
logger.info("--> creating index [test_x]");
@@ -152,7 +151,7 @@ public void testAliases() throws Exception {
logger.info("--> indexing against [alias1], should fail now");
exception = expectThrows(
IllegalArgumentException.class,
- () -> client().index(indexRequest("alias1").type("type1").id("1").source(source("2", "test"), XContentType.JSON)).actionGet()
+ () -> client().index(indexRequest("alias1").id("1").source(source("2", "test"), XContentType.JSON)).actionGet()
);
assertThat(
exception.getMessage(),
@@ -164,10 +163,7 @@ public void testAliases() throws Exception {
);
logger.info("--> deleting against [alias1], should fail now");
- exception = expectThrows(
- IllegalArgumentException.class,
- () -> client().delete(deleteRequest("alias1").type("type1").id("1")).actionGet()
- );
+ exception = expectThrows(IllegalArgumentException.class, () -> client().delete(deleteRequest("alias1").id("1")).actionGet());
assertThat(
exception.getMessage(),
equalTo(
@@ -183,8 +179,7 @@ public void testAliases() throws Exception {
});
logger.info("--> indexing against [alias1], should work now");
- indexResponse = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"), XContentType.JSON))
- .actionGet();
+ indexResponse = client().index(indexRequest("alias1").id("1").source(source("1", "test"), XContentType.JSON)).actionGet();
assertThat(indexResponse.getIndex(), equalTo("test"));
assertAliasesVersionIncreases("test_x", () -> {
@@ -193,12 +188,11 @@ public void testAliases() throws Exception {
});
logger.info("--> indexing against [alias1], should work now");
- indexResponse = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"), XContentType.JSON))
- .actionGet();
+ indexResponse = client().index(indexRequest("alias1").id("1").source(source("1", "test"), XContentType.JSON)).actionGet();
assertThat(indexResponse.getIndex(), equalTo("test_x"));
logger.info("--> deleting against [alias1], should fail now");
- DeleteResponse deleteResponse = client().delete(deleteRequest("alias1").type("type1").id("1")).actionGet();
+ DeleteResponse deleteResponse = client().delete(deleteRequest("alias1").id("1")).actionGet();
assertThat(deleteResponse.getIndex(), equalTo("test_x"));
assertAliasesVersionIncreases("test_x", () -> {
@@ -207,8 +201,7 @@ public void testAliases() throws Exception {
});
logger.info("--> indexing against [alias1], should work against [test_x]");
- indexResponse = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"), XContentType.JSON))
- .actionGet();
+ indexResponse = client().index(indexRequest("alias1").id("1").source(source("1", "test"), XContentType.JSON)).actionGet();
assertThat(indexResponse.getIndex(), equalTo("test_x"));
}
@@ -290,28 +283,16 @@ public void testSearchingFilteringAliasesSingleIndex() throws Exception {
logger.info("--> indexing against [test]");
client().index(
- indexRequest("test").type("type1")
- .id("1")
- .source(source("1", "foo test"), XContentType.JSON)
- .setRefreshPolicy(RefreshPolicy.IMMEDIATE)
+ indexRequest("test").id("1").source(source("1", "foo test"), XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE)
).actionGet();
client().index(
- indexRequest("test").type("type1")
- .id("2")
- .source(source("2", "bar test"), XContentType.JSON)
- .setRefreshPolicy(RefreshPolicy.IMMEDIATE)
+ indexRequest("test").id("2").source(source("2", "bar test"), XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE)
).actionGet();
client().index(
- indexRequest("test").type("type1")
- .id("3")
- .source(source("3", "baz test"), XContentType.JSON)
- .setRefreshPolicy(RefreshPolicy.IMMEDIATE)
+ indexRequest("test").id("3").source(source("3", "baz test"), XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE)
).actionGet();
client().index(
- indexRequest("test").type("type1")
- .id("4")
- .source(source("4", "something else"), XContentType.JSON)
- .setRefreshPolicy(RefreshPolicy.IMMEDIATE)
+ indexRequest("test").id("4").source(source("4", "something else"), XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE)
).actionGet();
logger.info("--> checking single filtering alias search");
@@ -408,16 +389,16 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception {
);
logger.info("--> indexing against [test1]");
- client().index(indexRequest("test1").type("type1").id("1").source(source("1", "foo test"), XContentType.JSON)).get();
- client().index(indexRequest("test1").type("type1").id("2").source(source("2", "bar test"), XContentType.JSON)).get();
- client().index(indexRequest("test1").type("type1").id("3").source(source("3", "baz test"), XContentType.JSON)).get();
- client().index(indexRequest("test1").type("type1").id("4").source(source("4", "something else"), XContentType.JSON)).get();
+ client().index(indexRequest("test1").id("1").source(source("1", "foo test"), XContentType.JSON)).get();
+ client().index(indexRequest("test1").id("2").source(source("2", "bar test"), XContentType.JSON)).get();
+ client().index(indexRequest("test1").id("3").source(source("3", "baz test"), XContentType.JSON)).get();
+ client().index(indexRequest("test1").id("4").source(source("4", "something else"), XContentType.JSON)).get();
logger.info("--> indexing against [test2]");
- client().index(indexRequest("test2").type("type1").id("5").source(source("5", "foo test"), XContentType.JSON)).get();
- client().index(indexRequest("test2").type("type1").id("6").source(source("6", "bar test"), XContentType.JSON)).get();
- client().index(indexRequest("test2").type("type1").id("7").source(source("7", "baz test"), XContentType.JSON)).get();
- client().index(indexRequest("test2").type("type1").id("8").source(source("8", "something else"), XContentType.JSON)).get();
+ client().index(indexRequest("test2").id("5").source(source("5", "foo test"), XContentType.JSON)).get();
+ client().index(indexRequest("test2").id("6").source(source("6", "bar test"), XContentType.JSON)).get();
+ client().index(indexRequest("test2").id("7").source(source("7", "baz test"), XContentType.JSON)).get();
+ client().index(indexRequest("test2").id("8").source(source("8", "something else"), XContentType.JSON)).get();
refresh();
@@ -524,17 +505,17 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception {
);
logger.info("--> indexing against [test1]");
- client().index(indexRequest("test1").type("type1").id("11").source(source("11", "foo test1"), XContentType.JSON)).get();
- client().index(indexRequest("test1").type("type1").id("12").source(source("12", "bar test1"), XContentType.JSON)).get();
- client().index(indexRequest("test1").type("type1").id("13").source(source("13", "baz test1"), XContentType.JSON)).get();
+ client().index(indexRequest("test1").id("11").source(source("11", "foo test1"), XContentType.JSON)).get();
+ client().index(indexRequest("test1").id("12").source(source("12", "bar test1"), XContentType.JSON)).get();
+ client().index(indexRequest("test1").id("13").source(source("13", "baz test1"), XContentType.JSON)).get();
- client().index(indexRequest("test2").type("type1").id("21").source(source("21", "foo test2"), XContentType.JSON)).get();
- client().index(indexRequest("test2").type("type1").id("22").source(source("22", "bar test2"), XContentType.JSON)).get();
- client().index(indexRequest("test2").type("type1").id("23").source(source("23", "baz test2"), XContentType.JSON)).get();
+ client().index(indexRequest("test2").id("21").source(source("21", "foo test2"), XContentType.JSON)).get();
+ client().index(indexRequest("test2").id("22").source(source("22", "bar test2"), XContentType.JSON)).get();
+ client().index(indexRequest("test2").id("23").source(source("23", "baz test2"), XContentType.JSON)).get();
- client().index(indexRequest("test3").type("type1").id("31").source(source("31", "foo test3"), XContentType.JSON)).get();
- client().index(indexRequest("test3").type("type1").id("32").source(source("32", "bar test3"), XContentType.JSON)).get();
- client().index(indexRequest("test3").type("type1").id("33").source(source("33", "baz test3"), XContentType.JSON)).get();
+ client().index(indexRequest("test3").id("31").source(source("31", "foo test3"), XContentType.JSON)).get();
+ client().index(indexRequest("test3").id("32").source(source("32", "bar test3"), XContentType.JSON)).get();
+ client().index(indexRequest("test3").id("33").source(source("33", "baz test3"), XContentType.JSON)).get();
refresh();
@@ -647,16 +628,16 @@ public void testDeletingByQueryFilteringAliases() throws Exception {
);
logger.info("--> indexing against [test1]");
- client().index(indexRequest("test1").type("type1").id("1").source(source("1", "foo test"), XContentType.JSON)).get();
- client().index(indexRequest("test1").type("type1").id("2").source(source("2", "bar test"), XContentType.JSON)).get();
- client().index(indexRequest("test1").type("type1").id("3").source(source("3", "baz test"), XContentType.JSON)).get();
- client().index(indexRequest("test1").type("type1").id("4").source(source("4", "something else"), XContentType.JSON)).get();
+ client().index(indexRequest("test1").id("1").source(source("1", "foo test"), XContentType.JSON)).get();
+ client().index(indexRequest("test1").id("2").source(source("2", "bar test"), XContentType.JSON)).get();
+ client().index(indexRequest("test1").id("3").source(source("3", "baz test"), XContentType.JSON)).get();
+ client().index(indexRequest("test1").id("4").source(source("4", "something else"), XContentType.JSON)).get();
logger.info("--> indexing against [test2]");
- client().index(indexRequest("test2").type("type1").id("5").source(source("5", "foo test"), XContentType.JSON)).get();
- client().index(indexRequest("test2").type("type1").id("6").source(source("6", "bar test"), XContentType.JSON)).get();
- client().index(indexRequest("test2").type("type1").id("7").source(source("7", "baz test"), XContentType.JSON)).get();
- client().index(indexRequest("test2").type("type1").id("8").source(source("8", "something else"), XContentType.JSON)).get();
+ client().index(indexRequest("test2").id("5").source(source("5", "foo test"), XContentType.JSON)).get();
+ client().index(indexRequest("test2").id("6").source(source("6", "bar test"), XContentType.JSON)).get();
+ client().index(indexRequest("test2").id("7").source(source("7", "baz test"), XContentType.JSON)).get();
+ client().index(indexRequest("test2").id("8").source(source("8", "something else"), XContentType.JSON)).get();
refresh();
@@ -744,7 +725,7 @@ public void testWaitForAliasCreationMultipleShards() throws Exception {
for (int i = 0; i < 10; i++) {
final String aliasName = "alias" + i;
assertAliasesVersionIncreases("test", () -> assertAcked(admin().indices().prepareAliases().addAlias("test", aliasName)));
- client().index(indexRequest(aliasName).type("type1").id("1").source(source("1", "test"), XContentType.JSON)).get();
+ client().index(indexRequest(aliasName).id("1").source(source("1", "test"), XContentType.JSON)).get();
}
}
@@ -765,7 +746,7 @@ public void testWaitForAliasCreationSingleShard() throws Exception {
for (int i = 0; i < 10; i++) {
final String aliasName = "alias" + i;
assertAliasesVersionIncreases("test", () -> assertAcked(admin().indices().prepareAliases().addAlias("test", aliasName)));
- client().index(indexRequest(aliasName).type("type1").id("1").source(source("1", "test"), XContentType.JSON)).get();
+ client().index(indexRequest(aliasName).id("1").source(source("1", "test"), XContentType.JSON)).get();
}
}
@@ -787,8 +768,7 @@ public void run() {
"test",
() -> assertAcked(admin().indices().prepareAliases().addAlias("test", aliasName))
);
- client().index(indexRequest(aliasName).type("type1").id("1").source(source("1", "test"), XContentType.JSON))
- .actionGet();
+ client().index(indexRequest(aliasName).id("1").source(source("1", "test"), XContentType.JSON)).actionGet();
}
});
}
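(Sketch, not part of the patch: writes routed through an alias use the same typeless Requests helpers; "alias1", the id, and the payload are illustrative.)

    import org.opensearch.action.delete.DeleteResponse;
    import org.opensearch.action.index.IndexResponse;
    import org.opensearch.client.Client;
    import org.opensearch.common.xcontent.XContentType;

    import static org.opensearch.client.Requests.deleteRequest;
    import static org.opensearch.client.Requests.indexRequest;

    static void writeThroughAlias(Client client) {
        IndexResponse indexResponse = client.index(indexRequest("alias1").id("1").source("{\"name\":\"value\"}", XContentType.JSON)).actionGet();
        // The response reports the concrete index the alias resolved to.
        String concreteIndex = indexResponse.getIndex();

        DeleteResponse deleteResponse = client.delete(deleteRequest("alias1").id("1")).actionGet();
        assert deleteResponse.getIndex().equals(concreteIndex);
    }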
diff --git a/server/src/internalClusterTest/java/org/opensearch/broadcast/BroadcastActionsIT.java b/server/src/internalClusterTest/java/org/opensearch/broadcast/BroadcastActionsIT.java
index c45155809a5ea..f9f99eb2662b0 100644
--- a/server/src/internalClusterTest/java/org/opensearch/broadcast/BroadcastActionsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/broadcast/BroadcastActionsIT.java
@@ -40,7 +40,7 @@
import java.io.IOException;
import static org.opensearch.client.Requests.indexRequest;
-import static org.opensearch.index.query.QueryBuilders.termQuery;
+import static org.opensearch.index.query.QueryBuilders.matchAllQuery;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
@@ -57,16 +57,16 @@ public void testBroadcastOperations() throws IOException {
NumShards numShards = getNumShards("test");
logger.info("Running Cluster Health");
- client().index(indexRequest("test").type("type1").id("1").source(source("1", "test"))).actionGet();
+ client().index(indexRequest("test").id("1").source(source("1", "test"))).actionGet();
flush();
- client().index(indexRequest("test").type("type1").id("2").source(source("2", "test"))).actionGet();
+ client().index(indexRequest("test").id("2").source(source("2", "test"))).actionGet();
refresh();
logger.info("Count");
// check count
for (int i = 0; i < 5; i++) {
// test successful
- SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(termQuery("_type", "type1")).get();
+ SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get();
assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L));
assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries));
assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java
index 1fd61c9e063d0..f8f686b27f29b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java
@@ -115,25 +115,25 @@ public void testNoMasterActions() throws Exception {
});
assertRequestBuilderThrows(
- clientToMasterlessNode.prepareGet("test", "type1", "1"),
+ clientToMasterlessNode.prepareGet("test", "1"),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
assertRequestBuilderThrows(
- clientToMasterlessNode.prepareGet("no_index", "type1", "1"),
+ clientToMasterlessNode.prepareGet("no_index", "1"),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
assertRequestBuilderThrows(
- clientToMasterlessNode.prepareMultiGet().add("test", "type1", "1"),
+ clientToMasterlessNode.prepareMultiGet().add("test", "1"),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
assertRequestBuilderThrows(
- clientToMasterlessNode.prepareMultiGet().add("no_index", "type1", "1"),
+ clientToMasterlessNode.prepareMultiGet().add("no_index", "1"),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
@@ -275,7 +275,7 @@ public void testNoMasterActionsWriteMasterBlock() throws Exception {
assertTrue(state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID));
});
- GetResponse getResponse = clientToMasterlessNode.prepareGet("test1", "type1", "1").get();
+ GetResponse getResponse = clientToMasterlessNode.prepareGet("test1", "1").get();
assertExists(getResponse);
SearchResponse countResponse = clientToMasterlessNode.prepareSearch("test1").setAllowPartialSearchResults(true).setSize(0).get();
@@ -371,10 +371,10 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception {
}
});
- GetResponse getResponse = client(randomFrom(nodesWithShards)).prepareGet("test1", "type1", "1").get();
+ GetResponse getResponse = client(randomFrom(nodesWithShards)).prepareGet("test1", "1").get();
assertExists(getResponse);
- expectThrows(Exception.class, () -> client(partitionedNode).prepareGet("test1", "type1", "1").get());
+ expectThrows(Exception.class, () -> client(partitionedNode).prepareGet("test1", "1").get());
SearchResponse countResponse = client(randomFrom(nodesWithShards)).prepareSearch("test1")
.setAllowPartialSearchResults(true)
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java
index 6317d633f25ea..233dca2dabb28 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java
@@ -378,7 +378,7 @@ public void testDelayedMappingPropagationOnReplica() throws Exception {
final ActionFuture<IndexResponse> docIndexResponse = client().prepareIndex("index", "type", "1").setSource("field", 42).execute();
- assertBusy(() -> assertTrue(client().prepareGet("index", "type", "1").get().isExists()));
+ assertBusy(() -> assertTrue(client().prepareGet("index", "1").get().isExists()));
// index another document, this time using dynamic mappings.
// The ack timeout of 0 on dynamic mapping updates makes it possible for the document to be indexed on the primary, even
@@ -400,7 +400,7 @@ public void testDelayedMappingPropagationOnReplica() throws Exception {
assertNotNull(mapper.mappers().getMapper("field2"));
});
- assertBusy(() -> assertTrue(client().prepareGet("index", "type", "2").get().isExists()));
+ assertBusy(() -> assertTrue(client().prepareGet("index", "2").get().isExists()));
// The mappings have not been propagated to the replica yet, as a consequence the document cannot be indexed
// We wait on purpose to make sure that the document is not indexed because the shard operation is stalled
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java
index 5c07ef8e7baea..ea5bb145cfd75 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java
@@ -439,7 +439,7 @@ public void testAllMasterEligibleNodesFailedDanglingIndexImport() throws Excepti
logger.info("--> verify 1 doc in the index");
assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
- assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true));
+ assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(true));
logger.info("--> stop data-only node and detach it from the old cluster");
Settings dataNodeDataPathSettings = Settings.builder()
@@ -474,7 +474,7 @@ public boolean clearData(String nodeName) {
ensureGreen("test");
logger.info("--> verify the doc is there");
- assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true));
+ assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(true));
}
public void testNoInitialBootstrapAfterDetach() throws Exception {
diff --git a/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java
index 0bfd3e22a3bc9..6da62ab5107c9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java
@@ -255,7 +255,7 @@ public void testAckedIndexing() throws Exception {
for (String id : ackedDocs.keySet()) {
assertTrue(
"doc [" + id + "] indexed via node [" + ackedDocs.get(id) + "] not found",
- client(node).prepareGet("test", "type", id).setPreference("_local").get().isExists()
+ client(node).prepareGet("test", id).setPreference("_local").get().isExists()
);
}
} catch (AssertionError | NoShardAvailableActionException e) {
@@ -316,7 +316,7 @@ public void testRejoinDocumentExistsInAllShardCopies() throws Exception {
logger.info("Verifying if document exists via node[{}]", notIsolatedNode);
GetResponse getResponse = internalCluster().client(notIsolatedNode)
- .prepareGet("test", "type", indexResponse.getId())
+ .prepareGet("test", indexResponse.getId())
.setPreference("_local")
.get();
assertThat(getResponse.isExists(), is(true));
@@ -330,7 +330,7 @@ public void testRejoinDocumentExistsInAllShardCopies() throws Exception {
for (String node : nodes) {
logger.info("Verifying if document exists after isolating node[{}] via node[{}]", isolatedNode, node);
- getResponse = internalCluster().client(node).prepareGet("test", "type", indexResponse.getId()).setPreference("_local").get();
+ getResponse = internalCluster().client(node).prepareGet("test", indexResponse.getId()).setPreference("_local").get();
assertThat(getResponse.isExists(), is(true));
assertThat(getResponse.getVersion(), equalTo(1L));
assertThat(getResponse.getId(), equalTo(indexResponse.getId()));
diff --git a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java
index d1138351bde76..4ca281fad157a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java
@@ -90,7 +90,6 @@ public void testIndexActions() throws Exception {
.get();
assertThat(indexResponse.getIndex(), equalTo(getConcreteIndexName()));
assertThat(indexResponse.getId(), equalTo("1"));
- assertThat(indexResponse.getType(), equalTo("type1"));
logger.info("Refreshing");
RefreshResponse refreshResponse = refresh();
assertThat(refreshResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards));
@@ -117,18 +116,18 @@ public void testIndexActions() throws Exception {
logger.info("Get [type1/1]");
for (int i = 0; i < 5; i++) {
- getResult = client().prepareGet("test", "type1", "1").execute().actionGet();
+ getResult = client().prepareGet("test", "1").execute().actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test"))));
assertThat("cycle(map) #" + i, (String) getResult.getSourceAsMap().get("name"), equalTo("test"));
- getResult = client().get(getRequest("test").type("type1").id("1")).actionGet();
+ getResult = client().get(getRequest("test").id("1")).actionGet();
assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test"))));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
}
logger.info("Get [type1/1] with script");
for (int i = 0; i < 5; i++) {
- getResult = client().prepareGet("test", "type1", "1").setStoredFields("name").execute().actionGet();
+ getResult = client().prepareGet("test", "1").setStoredFields("name").execute().actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
assertThat(getResult.isExists(), equalTo(true));
assertThat(getResult.getSourceAsBytes(), nullValue());
@@ -137,7 +136,7 @@ public void testIndexActions() throws Exception {
logger.info("Get [type1/2] (should be empty)");
for (int i = 0; i < 5; i++) {
- getResult = client().get(getRequest("test").type("type1").id("2")).actionGet();
+ getResult = client().get(getRequest("test").id("2")).actionGet();
assertThat(getResult.isExists(), equalTo(false));
}
@@ -145,20 +144,19 @@ public void testIndexActions() throws Exception {
DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "1").execute().actionGet();
assertThat(deleteResponse.getIndex(), equalTo(getConcreteIndexName()));
assertThat(deleteResponse.getId(), equalTo("1"));
- assertThat(deleteResponse.getType(), equalTo("type1"));
logger.info("Refreshing");
client().admin().indices().refresh(refreshRequest("test")).actionGet();
logger.info("Get [type1/1] (should be empty)");
for (int i = 0; i < 5; i++) {
- getResult = client().get(getRequest("test").type("type1").id("1")).actionGet();
+ getResult = client().get(getRequest("test").id("1")).actionGet();
assertThat(getResult.isExists(), equalTo(false));
}
logger.info("Index [type1/1]");
- client().index(indexRequest("test").type("type1").id("1").source(source("1", "test"))).actionGet();
+ client().index(indexRequest("test").id("1").source(source("1", "test"))).actionGet();
logger.info("Index [type1/2]");
- client().index(indexRequest("test").type("type1").id("2").source(source("2", "test2"))).actionGet();
+ client().index(indexRequest("test").id("2").source(source("2", "test2"))).actionGet();
logger.info("Flushing");
FlushResponse flushResult = client().admin().indices().prepareFlush("test").execute().actionGet();
@@ -169,10 +167,10 @@ public void testIndexActions() throws Exception {
logger.info("Get [type1/1] and [type1/2]");
for (int i = 0; i < 5; i++) {
- getResult = client().get(getRequest("test").type("type1").id("1")).actionGet();
+ getResult = client().get(getRequest("test").id("1")).actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test"))));
- getResult = client().get(getRequest("test").type("type1").id("2")).actionGet();
+ getResult = client().get(getRequest("test").id("2")).actionGet();
String ste1 = getResult.getSourceAsString();
String ste2 = Strings.toString(source("2", "test2"));
assertThat("cycle #" + i, ste1, equalTo(ste2));
@@ -228,37 +226,31 @@ public void testBulk() throws Exception {
assertThat(bulkResponse.getItems()[0].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[0].getOpType(), equalTo(OpType.INDEX));
assertThat(bulkResponse.getItems()[0].getIndex(), equalTo(getConcreteIndexName()));
- assertThat(bulkResponse.getItems()[0].getType(), equalTo("type1"));
assertThat(bulkResponse.getItems()[0].getId(), equalTo("1"));
assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[1].getOpType(), equalTo(OpType.CREATE));
assertThat(bulkResponse.getItems()[1].getIndex(), equalTo(getConcreteIndexName()));
- assertThat(bulkResponse.getItems()[1].getType(), equalTo("type1"));
assertThat(bulkResponse.getItems()[1].getId(), equalTo("2"));
assertThat(bulkResponse.getItems()[2].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[2].getOpType(), equalTo(OpType.INDEX));
assertThat(bulkResponse.getItems()[2].getIndex(), equalTo(getConcreteIndexName()));
- assertThat(bulkResponse.getItems()[2].getType(), equalTo("type1"));
String generatedId3 = bulkResponse.getItems()[2].getId();
assertThat(bulkResponse.getItems()[3].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[3].getOpType(), equalTo(OpType.CREATE));
assertThat(bulkResponse.getItems()[3].getIndex(), equalTo(getConcreteIndexName()));
- assertThat(bulkResponse.getItems()[3].getType(), equalTo("type1"));
String generatedId4 = bulkResponse.getItems()[3].getId();
assertThat(bulkResponse.getItems()[4].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[4].getOpType(), equalTo(OpType.DELETE));
assertThat(bulkResponse.getItems()[4].getIndex(), equalTo(getConcreteIndexName()));
- assertThat(bulkResponse.getItems()[4].getType(), equalTo("type1"));
assertThat(bulkResponse.getItems()[4].getId(), equalTo("1"));
assertThat(bulkResponse.getItems()[5].isFailed(), equalTo(true));
assertThat(bulkResponse.getItems()[5].getOpType(), equalTo(OpType.INDEX));
assertThat(bulkResponse.getItems()[5].getIndex(), equalTo(getConcreteIndexName()));
- assertThat(bulkResponse.getItems()[5].getType(), equalTo("type1"));
waitForRelocation(ClusterHealthStatus.GREEN);
RefreshResponse refreshResponse = client().admin().indices().prepareRefresh("test").execute().actionGet();
@@ -266,15 +258,15 @@ public void testBulk() throws Exception {
assertThat(refreshResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards));
for (int i = 0; i < 5; i++) {
- GetResponse getResult = client().get(getRequest("test").type("type1").id("1")).actionGet();
+ GetResponse getResult = client().get(getRequest("test").id("1")).actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
assertThat("cycle #" + i, getResult.isExists(), equalTo(false));
- getResult = client().get(getRequest("test").type("type1").id("2")).actionGet();
+ getResult = client().get(getRequest("test").id("2")).actionGet();
assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("2", "test"))));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
- getResult = client().get(getRequest("test").type("type1").id(generatedId3)).actionGet();
+ getResult = client().get(getRequest("test").id(generatedId3)).actionGet();
assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("3", "test"))));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
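The DocumentActionsIT changes above all follow the same shape: drop `.type(...)` from `indexRequest`/`getRequest`, drop the type argument from `prepareGet`, and stop asserting on the removed `getType()`. A minimal sketch of the typeless round trip, assuming a node `Client`; names are illustrative only:

```java
import static org.opensearch.client.Requests.getRequest;
import static org.opensearch.client.Requests.indexRequest;

import org.opensearch.action.get.GetResponse;
import org.opensearch.client.Client;

// Illustrative helper only: index and read back a document without any mapping type.
final class TypelessRoundTrip {
    static GetResponse indexAndGet(Client client, String index, String id) {
        // index: the id/source chain is unchanged, only .type(...) disappears
        client.index(indexRequest(index).id(id).source("name", "test")).actionGet();
        // get: the type argument disappears as well
        return client.get(getRequest(index).id(id)).actionGet();
    }
}
```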
diff --git a/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java b/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java
index ccb2920c274eb..c90aa333604d3 100644
--- a/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java
@@ -69,7 +69,7 @@ public void testRepurpose() throws Exception {
ensureGreen();
- assertTrue(client().prepareGet(indexName, "type1", "1").get().isExists());
+ assertTrue(client().prepareGet(indexName, "1").get().isExists());
final Settings masterNodeDataPathSettings = internalCluster().dataPathSettings(masterNode);
final Settings dataNodeDataPathSettings = internalCluster().dataPathSettings(dataNode);
@@ -112,7 +112,7 @@ public void testRepurpose() throws Exception {
internalCluster().startCoordinatingOnlyNode(dataNodeDataPathSettings);
assertTrue(indexExists(indexName));
- expectThrows(NoShardAvailableActionException.class, () -> client().prepareGet(indexName, "type1", "1").get());
+ expectThrows(NoShardAvailableActionException.class, () -> client().prepareGet(indexName, "1").get());
logger.info("--> Restarting and repurposing other node");
diff --git a/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java b/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java
index 79fe3a9119eae..178a424d07a7c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java
@@ -65,36 +65,33 @@ public void testSimple() throws Exception {
client().prepareIndex("test", "test", "1").setSource("field", "value1").get();
- ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1").setQuery(QueryBuilders.matchAllQuery()).get();
+ ExplainResponse response = client().prepareExplain(indexOrAlias(), "1").setQuery(QueryBuilders.matchAllQuery()).get();
assertNotNull(response);
assertFalse(response.isExists()); // not a match b/c not realtime
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("1"));
assertFalse(response.isMatch()); // not a match b/c not realtime
refresh();
- response = client().prepareExplain(indexOrAlias(), "test", "1").setQuery(QueryBuilders.matchAllQuery()).get();
+ response = client().prepareExplain(indexOrAlias(), "1").setQuery(QueryBuilders.matchAllQuery()).get();
assertNotNull(response);
assertTrue(response.isMatch());
assertNotNull(response.getExplanation());
assertTrue(response.getExplanation().isMatch());
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getExplanation().getValue(), equalTo(1.0f));
- response = client().prepareExplain(indexOrAlias(), "test", "1").setQuery(QueryBuilders.termQuery("field", "value2")).get();
+ response = client().prepareExplain(indexOrAlias(), "1").setQuery(QueryBuilders.termQuery("field", "value2")).get();
assertNotNull(response);
assertTrue(response.isExists());
assertFalse(response.isMatch());
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("1"));
assertNotNull(response.getExplanation());
assertFalse(response.getExplanation().isMatch());
- response = client().prepareExplain(indexOrAlias(), "test", "1")
+ response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(
QueryBuilders.boolQuery().must(QueryBuilders.termQuery("field", "value1")).must(QueryBuilders.termQuery("field", "value2"))
)
@@ -103,18 +100,16 @@ public void testSimple() throws Exception {
assertTrue(response.isExists());
assertFalse(response.isMatch());
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("1"));
assertNotNull(response.getExplanation());
assertFalse(response.getExplanation().isMatch());
assertThat(response.getExplanation().getDetails().length, equalTo(2));
- response = client().prepareExplain(indexOrAlias(), "test", "2").setQuery(QueryBuilders.matchAllQuery()).get();
+ response = client().prepareExplain(indexOrAlias(), "2").setQuery(QueryBuilders.matchAllQuery()).get();
assertNotNull(response);
assertFalse(response.isExists());
assertFalse(response.isMatch());
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("2"));
}
@@ -132,7 +127,7 @@ public void testExplainWithFields() throws Exception {
.get();
refresh();
- ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1")
+ ExplainResponse response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(QueryBuilders.matchAllQuery())
.setStoredFields("obj1.field1")
.get();
@@ -149,7 +144,7 @@ public void testExplainWithFields() throws Exception {
assertThat(response.getGetResult().isSourceEmpty(), equalTo(true));
refresh();
- response = client().prepareExplain(indexOrAlias(), "test", "1")
+ response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(QueryBuilders.matchAllQuery())
.setStoredFields("obj1.field1")
.setFetchSource(true)
@@ -166,20 +161,20 @@ public void testExplainWithFields() throws Exception {
assertThat(response.getGetResult().getFields().get("obj1.field1").getValue().toString(), equalTo("value1"));
assertThat(response.getGetResult().isSourceEmpty(), equalTo(false));
- response = client().prepareExplain(indexOrAlias(), "test", "1")
+ response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(QueryBuilders.matchAllQuery())
.setStoredFields("obj1.field1", "obj1.field2")
.get();
assertNotNull(response);
assertTrue(response.isMatch());
- String v1 = (String) response.getGetResult().field("obj1.field1").getValue();
- String v2 = (String) response.getGetResult().field("obj1.field2").getValue();
+ String v1 = response.getGetResult().field("obj1.field1").getValue();
+ String v2 = response.getGetResult().field("obj1.field2").getValue();
assertThat(v1, equalTo("value1"));
assertThat(v2, equalTo("value2"));
}
@SuppressWarnings("unchecked")
- public void testExplainWitSource() throws Exception {
+ public void testExplainWithSource() throws Exception {
assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
ensureGreen("test");
@@ -190,7 +185,7 @@ public void testExplainWitSource() throws Exception {
.get();
refresh();
- ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1")
+ ExplainResponse response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(QueryBuilders.matchAllQuery())
.setFetchSource("obj1.field1", null)
.get();
@@ -204,7 +199,7 @@ public void testExplainWitSource() throws Exception {
assertThat(response.getGetResult().getSource().size(), equalTo(1));
assertThat(((Map) response.getGetResult().getSource().get("obj1")).get("field1").toString(), equalTo("value1"));
- response = client().prepareExplain(indexOrAlias(), "test", "1")
+ response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(QueryBuilders.matchAllQuery())
.setFetchSource(null, "obj1.field2")
.get();
@@ -213,7 +208,7 @@ public void testExplainWitSource() throws Exception {
assertThat(((Map) response.getGetResult().getSource().get("obj1")).get("field1").toString(), equalTo("value1"));
}
- public void testExplainWithFilteredAlias() throws Exception {
+ public void testExplainWithFilteredAlias() {
assertAcked(
prepareCreate("test").addMapping("test", "field2", "type=text")
.addAlias(new Alias("alias1").filter(QueryBuilders.termQuery("field2", "value2")))
@@ -223,7 +218,7 @@ public void testExplainWithFilteredAlias() throws Exception {
client().prepareIndex("test", "test", "1").setSource("field1", "value1", "field2", "value1").get();
refresh();
- ExplainResponse response = client().prepareExplain("alias1", "test", "1").setQuery(QueryBuilders.matchAllQuery()).get();
+ ExplainResponse response = client().prepareExplain("alias1", "1").setQuery(QueryBuilders.matchAllQuery()).get();
assertNotNull(response);
assertTrue(response.isExists());
assertFalse(response.isMatch());
@@ -242,7 +237,7 @@ public void testExplainWithFilteredAliasFetchSource() throws Exception {
client().prepareIndex("test", "test", "1").setSource("field1", "value1", "field2", "value1").get();
refresh();
- ExplainResponse response = client().prepareExplain("alias1", "test", "1")
+ ExplainResponse response = client().prepareExplain("alias1", "1")
.setQuery(QueryBuilders.matchAllQuery())
.setFetchSource(true)
.get();
@@ -251,11 +246,9 @@ public void testExplainWithFilteredAliasFetchSource() throws Exception {
assertTrue(response.isExists());
assertFalse(response.isMatch());
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getGetResult(), notNullValue());
assertThat(response.getGetResult().getIndex(), equalTo("test"));
- assertThat(response.getGetResult().getType(), equalTo("test"));
assertThat(response.getGetResult().getId(), equalTo("1"));
assertThat(response.getGetResult().getSource(), notNullValue());
assertThat((String) response.getGetResult().getSource().get("field1"), equalTo("value1"));
@@ -272,9 +265,7 @@ public void testExplainDateRangeInQueryString() {
refresh();
- ExplainResponse explainResponse = client().prepareExplain("test", "type", "1")
- .setQuery(queryStringQuery("past:[now-2M/d TO now/d]"))
- .get();
+ ExplainResponse explainResponse = client().prepareExplain("test", "1").setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get();
assertThat(explainResponse.isExists(), equalTo(true));
assertThat(explainResponse.isMatch(), equalTo(true));
}
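`prepareExplain` now takes just an index and an id, as the hunks above show. A minimal sketch of the typeless explain call, assuming a node `Client`; names are illustrative only:

```java
import org.opensearch.action.explain.ExplainResponse;
import org.opensearch.client.Client;
import org.opensearch.index.query.QueryBuilders;

// Illustrative helper only: explain whether a stored document matches a query, typeless form.
final class TypelessExplain {
    static boolean existsAndMatches(Client client, String index, String id) {
        ExplainResponse response = client.prepareExplain(index, id)
            .setQuery(QueryBuilders.matchAllQuery())
            .get();
        // isExists(): the document was found; isMatch(): the query matched it
        return response.isExists() && response.isMatch();
    }
}
```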
diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java
index f306425fc9458..4c0fa15a55824 100644
--- a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java
@@ -214,7 +214,7 @@ public void testSimpleOpenClose() throws Exception {
);
logger.info("--> trying to get the indexed document on the first index");
- GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
+ GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
logger.info("--> closing test index...");
@@ -255,7 +255,7 @@ public void testSimpleOpenClose() throws Exception {
);
logger.info("--> trying to get the indexed document on the first round (before close and shutdown)");
- getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
+ getResponse = client().prepareGet("test", "1").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
logger.info("--> indexing a simple document");
diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java
index 23432b4bd15b1..e9414fd651ca0 100644
--- a/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java
@@ -33,7 +33,6 @@
package org.opensearch.gateway;
import org.opensearch.LegacyESVersion;
-import org.opensearch.action.admin.indices.flush.SyncedFlushResponse;
import org.opensearch.action.admin.indices.stats.ShardStats;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.node.DiscoveryNode;
@@ -98,7 +97,6 @@ public void testPreferCopyCanPerformNoopRecovery() throws Exception {
.prepareCreate(indexName)
.setSettings(
Settings.builder()
- .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean())
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
.put(IndexSettings.FILE_BASED_RECOVERY_THRESHOLD_SETTING.getKey(), 1.0f)
@@ -197,10 +195,6 @@ public void testRecentPrimaryInformation() throws Exception {
.mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v"))
.collect(Collectors.toList())
);
- assertBusy(() -> {
- SyncedFlushResponse syncedFlushResponse = client().admin().indices().prepareSyncedFlush(indexName).get();
- assertThat(syncedFlushResponse.successfulShards(), equalTo(2));
- });
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodeWithReplica));
if (randomBoolean()) {
indexRandom(
@@ -280,7 +274,6 @@ public void testFullClusterRestartPerformNoopRecovery() throws Exception {
.prepareCreate(indexName)
.setSettings(
Settings.builder()
- .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean())
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), randomIntBetween(10, 100) + "kb")
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numOfReplicas)
@@ -342,7 +335,6 @@ public void testPreferCopyWithHighestMatchingOperations() throws Exception {
.prepareCreate(indexName)
.setSettings(
Settings.builder()
- .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean())
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), randomIntBetween(10, 100) + "kb")
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
@@ -473,7 +465,6 @@ public void testPeerRecoveryForClosedIndices() throws Exception {
Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
- .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean())
.put(IndexService.GLOBAL_CHECKPOINT_SYNC_INTERVAL_SETTING.getKey(), "100ms")
.put(IndexService.RETENTION_LEASE_SYNC_INTERVAL_SETTING.getKey(), "100ms")
.build()
@@ -552,12 +543,6 @@ public void testSimulateRecoverySourceOnOldNode() throws Exception {
if (randomBoolean()) {
client().admin().indices().prepareFlush(indexName).get();
}
- if (randomBoolean()) {
- assertBusy(() -> {
- SyncedFlushResponse syncedFlushResponse = client().admin().indices().prepareSyncedFlush(indexName).get();
- assertThat(syncedFlushResponse.successfulShards(), equalTo(1));
- });
- }
internalCluster().startDataOnlyNode();
MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, source);
Semaphore failRecovery = new Semaphore(1);
@@ -591,10 +576,11 @@ public void testSimulateRecoverySourceOnOldNode() throws Exception {
transportService.clearAllRules();
}
- private void ensureActivePeerRecoveryRetentionLeasesAdvanced(String indexName) throws Exception {
+ public static void ensureActivePeerRecoveryRetentionLeasesAdvanced(String indexName) throws Exception {
+ final ClusterService clusterService = internalCluster().clusterService();
assertBusy(() -> {
Index index = resolveIndex(indexName);
- Set<String> activeRetentionLeaseIds = clusterService().state()
+ Set<String> activeRetentionLeaseIds = clusterService.state()
.routingTable()
.index(index)
.shard(0)
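The synced-flush waits removed above get no direct replacement; where these tests still need a flush they rely on the ordinary flush API already used elsewhere in this file, presumably because sequence-number and retention-lease based recovery makes the sync marker unnecessary. A minimal sketch, assuming a node `Client`; names are illustrative only:

```java
import org.opensearch.action.admin.indices.flush.FlushResponse;
import org.opensearch.client.Client;

// Illustrative helper only: a plain flush, the remaining option now that _flush/synced is gone.
final class PlainFlush {
    static FlushResponse flush(Client client, String index) {
        return client.admin().indices().prepareFlush(index).get();
    }
}
```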
diff --git a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java
index f8079aa1d93f3..327e35dbc7d0b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java
@@ -90,25 +90,25 @@ public void testSimpleGet() {
);
ensureGreen();
- GetResponse response = client().prepareGet(indexOrAlias(), "type1", "1").get();
+ GetResponse response = client().prepareGet(indexOrAlias(), "1").get();
assertThat(response.isExists(), equalTo(false));
logger.info("--> index doc 1");
client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get();
logger.info("--> non realtime get 1");
- response = client().prepareGet(indexOrAlias(), "type1", "1").setRealtime(false).get();
+ response = client().prepareGet(indexOrAlias(), "1").setRealtime(false).get();
assertThat(response.isExists(), equalTo(false));
logger.info("--> realtime get 1");
- response = client().prepareGet(indexOrAlias(), "type1", "1").get();
+ response = client().prepareGet(indexOrAlias(), "1").get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
logger.info("--> realtime get 1 (no source, implicit)");
- response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields(Strings.EMPTY_ARRAY).get();
+ response = client().prepareGet(indexOrAlias(), "1").setStoredFields(Strings.EMPTY_ARRAY).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
Set<String> fields = new HashSet<>(response.getFields().keySet());
@@ -116,7 +116,7 @@ public void testSimpleGet() {
assertThat(response.getSourceAsBytes(), nullValue());
logger.info("--> realtime get 1 (no source, explicit)");
- response = client().prepareGet(indexOrAlias(), "type1", "1").setFetchSource(false).get();
+ response = client().prepareGet(indexOrAlias(), "1").setFetchSource(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
fields = new HashSet<>(response.getFields().keySet());
@@ -124,14 +124,14 @@ public void testSimpleGet() {
assertThat(response.getSourceAsBytes(), nullValue());
logger.info("--> realtime get 1 (no type)");
- response = client().prepareGet(indexOrAlias(), null, "1").get();
+ response = client().prepareGet(indexOrAlias(), "1").get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
logger.info("--> realtime fetch of field");
- response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").get();
+ response = client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsBytes(), nullValue());
@@ -139,7 +139,7 @@ public void testSimpleGet() {
assertThat(response.getField("field2"), nullValue());
logger.info("--> realtime fetch of field & source");
- response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").setFetchSource("field1", null).get();
+ response = client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").setFetchSource("field1", null).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsMap(), hasKey("field1"));
@@ -148,7 +148,7 @@ public void testSimpleGet() {
assertThat(response.getField("field2"), nullValue());
logger.info("--> realtime get 1");
- response = client().prepareGet(indexOrAlias(), "type1", "1").get();
+ response = client().prepareGet(indexOrAlias(), "1").get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
@@ -158,14 +158,14 @@ public void testSimpleGet() {
refresh();
logger.info("--> non realtime get 1 (loaded from index)");
- response = client().prepareGet(indexOrAlias(), "type1", "1").setRealtime(false).get();
+ response = client().prepareGet(indexOrAlias(), "1").setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
logger.info("--> realtime fetch of field (loaded from index)");
- response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").get();
+ response = client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsBytes(), nullValue());
@@ -173,7 +173,7 @@ public void testSimpleGet() {
assertThat(response.getField("field2"), nullValue());
logger.info("--> realtime fetch of field & source (loaded from index)");
- response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").setFetchSource(true).get();
+ response = client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").setFetchSource(true).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsBytes(), not(nullValue()));
@@ -184,7 +184,7 @@ public void testSimpleGet() {
client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").get();
logger.info("--> realtime get 1");
- response = client().prepareGet(indexOrAlias(), "type1", "1").get();
+ response = client().prepareGet(indexOrAlias(), "1").get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_1"));
@@ -193,7 +193,7 @@ public void testSimpleGet() {
logger.info("--> update doc 1 again");
client().prepareIndex("test", "type1", "1").setSource("field1", "value1_2", "field2", "value2_2").get();
- response = client().prepareGet(indexOrAlias(), "type1", "1").get();
+ response = client().prepareGet(indexOrAlias(), "1").get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_2"));
@@ -202,7 +202,7 @@ public void testSimpleGet() {
DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "1").get();
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
- response = client().prepareGet(indexOrAlias(), "type1", "1").get();
+ response = client().prepareGet(indexOrAlias(), "1").get();
assertThat(response.isExists(), equalTo(false));
}
@@ -222,7 +222,7 @@ public void testGetWithAliasPointingToMultipleIndices() {
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
- () -> client().prepareGet("alias1", "type", "_alias_id").get()
+ () -> client().prepareGet("alias1", "_alias_id").get()
);
assertThat(exception.getMessage(), endsWith("can't execute a single index op"));
}
@@ -239,7 +239,7 @@ public void testSimpleMultiGet() throws Exception {
);
ensureGreen();
- MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "type1", "1").get();
+ MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "1").get();
assertThat(response.getResponses().length, equalTo(1));
assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false));
@@ -248,11 +248,11 @@ public void testSimpleMultiGet() throws Exception {
}
response = client().prepareMultiGet()
- .add(indexOrAlias(), "type1", "1")
- .add(indexOrAlias(), "type1", "15")
- .add(indexOrAlias(), "type1", "3")
- .add(indexOrAlias(), "type1", "9")
- .add(indexOrAlias(), "type1", "11")
+ .add(indexOrAlias(), "1")
+ .add(indexOrAlias(), "15")
+ .add(indexOrAlias(), "3")
+ .add(indexOrAlias(), "9")
+ .add(indexOrAlias(), "11")
.get();
assertThat(response.getResponses().length, equalTo(5));
assertThat(response.getResponses()[0].getId(), equalTo("1"));
@@ -278,8 +278,8 @@ public void testSimpleMultiGet() throws Exception {
// multi get with specific field
response = client().prepareMultiGet()
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").storedFields("field"))
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "3").storedFields("field"))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "1").storedFields("field"))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "3").storedFields("field"))
.get();
assertThat(response.getResponses().length, equalTo(2));
@@ -304,16 +304,15 @@ public void testGetDocWithMultivaluedFields() throws Exception {
assertAcked(prepareCreate("test").addMapping("type1", mapping1, XContentType.JSON));
ensureGreen();
- GetResponse response = client().prepareGet("test", "type1", "1").get();
+ GetResponse response = client().prepareGet("test", "1").get();
assertThat(response.isExists(), equalTo(false));
assertThat(response.isExists(), equalTo(false));
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().array("field", "1", "2").endObject()).get();
- response = client().prepareGet("test", "type1", "1").setStoredFields("field").get();
+ response = client().prepareGet("test", "1").setStoredFields("field").get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
- assertThat(response.getType(), equalTo("type1"));
Set<String> fields = new HashSet<>(response.getFields().keySet());
assertThat(fields, equalTo(singleton("field")));
assertThat(response.getFields().get("field").getValues().size(), equalTo(2));
@@ -322,7 +321,7 @@ public void testGetDocWithMultivaluedFields() throws Exception {
// Now test values being fetched from stored fields.
refresh();
- response = client().prepareGet("test", "type1", "1").setStoredFields("field").get();
+ response = client().prepareGet("test", "1").setStoredFields("field").get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
fields = new HashSet<>(response.getFields().keySet());
@@ -336,7 +335,7 @@ public void testGetWithVersion() {
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1)));
ensureGreen();
- GetResponse response = client().prepareGet("test", "type1", "1").get();
+ GetResponse response = client().prepareGet("test", "1").get();
assertThat(response.isExists(), equalTo(false));
logger.info("--> index doc 1");
@@ -344,18 +343,18 @@ public void testGetWithVersion() {
// From translog:
- response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
+ response = client().prepareGet(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getVersion(), equalTo(1L));
- response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get();
+ response = client().prepareGet(indexOrAlias(), "1").setVersion(1).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getVersion(), equalTo(1L));
try {
- client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get();
+ client().prepareGet(indexOrAlias(), "1").setVersion(2).get();
fail();
} catch (VersionConflictEngineException e) {
// all good
@@ -364,20 +363,20 @@ public void testGetWithVersion() {
// From Lucene index:
refresh();
- response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
+ response = client().prepareGet(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getVersion(), equalTo(1L));
- response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
+ response = client().prepareGet(indexOrAlias(), "1").setVersion(1).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getVersion(), equalTo(1L));
try {
- client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
+ client().prepareGet(indexOrAlias(), "1").setVersion(2).setRealtime(false).get();
fail();
} catch (VersionConflictEngineException e) {
// all good
@@ -388,20 +387,20 @@ public void testGetWithVersion() {
// From translog:
- response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
+ response = client().prepareGet(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getVersion(), equalTo(2L));
try {
- client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get();
+ client().prepareGet(indexOrAlias(), "1").setVersion(1).get();
fail();
} catch (VersionConflictEngineException e) {
// all good
}
- response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get();
+ response = client().prepareGet(indexOrAlias(), "1").setVersion(2).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
@@ -410,20 +409,20 @@ public void testGetWithVersion() {
// From Lucene index:
refresh();
- response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
+ response = client().prepareGet(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getVersion(), equalTo(2L));
try {
- client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
+ client().prepareGet(indexOrAlias(), "1").setVersion(1).setRealtime(false).get();
fail();
} catch (VersionConflictEngineException e) {
// all good
}
- response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
+ response = client().prepareGet(indexOrAlias(), "1").setVersion(2).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
@@ -434,7 +433,7 @@ public void testMultiGetWithVersion() throws Exception {
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1)));
ensureGreen();
- MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "type1", "1").get();
+ MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "1").get();
assertThat(response.getResponses().length, equalTo(1));
assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false));
@@ -444,9 +443,9 @@ public void testMultiGetWithVersion() throws Exception {
// Version from translog
response = client().prepareMultiGet()
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(Versions.MATCH_ANY))
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(1))
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(2))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(Versions.MATCH_ANY))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(1))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(2))
.get();
assertThat(response.getResponses().length, equalTo(3));
// [0] version doesn't matter, which is the default
@@ -468,9 +467,9 @@ public void testMultiGetWithVersion() throws Exception {
// Version from Lucene index
refresh();
response = client().prepareMultiGet()
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(Versions.MATCH_ANY))
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(1))
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(2))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(Versions.MATCH_ANY))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(1))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(2))
.setRealtime(false)
.get();
assertThat(response.getResponses().length, equalTo(3));
@@ -494,9 +493,9 @@ public void testMultiGetWithVersion() throws Exception {
// Version from translog
response = client().prepareMultiGet()
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(Versions.MATCH_ANY))
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(1))
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(2))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(Versions.MATCH_ANY))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(1))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(2))
.get();
assertThat(response.getResponses().length, equalTo(3));
// [0] version doesn't matter, which is the default
@@ -518,9 +517,9 @@ public void testMultiGetWithVersion() throws Exception {
// Version from Lucene index
refresh();
response = client().prepareMultiGet()
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(Versions.MATCH_ANY))
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(1))
- .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(2))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(Versions.MATCH_ANY))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(1))
+ .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(2))
.setRealtime(false)
.get();
assertThat(response.getResponses().length, equalTo(3));
@@ -569,16 +568,13 @@ public void testGetFieldsNonLeafField() throws Exception {
IllegalArgumentException exc = expectThrows(
IllegalArgumentException.class,
- () -> client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("field1").get()
+ () -> client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").get()
);
assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field"));
flush();
- exc = expectThrows(
- IllegalArgumentException.class,
- () -> client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("field1").get()
- );
+ exc = expectThrows(IllegalArgumentException.class, () -> client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").get());
assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field"));
}
@@ -649,13 +645,13 @@ public void testGetFieldsComplexField() throws Exception {
logger.info("checking real time retrieval");
String field = "field1.field2.field3.field4";
- GetResponse getResponse = client().prepareGet("my-index", "my-type", "1").setStoredFields(field).get();
+ GetResponse getResponse = client().prepareGet("my-index", "1").setStoredFields(field).get();
assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1"));
assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2"));
- getResponse = client().prepareGet("my-index", "my-type", "1").setStoredFields(field).get();
+ getResponse = client().prepareGet("my-index", "1").setStoredFields(field).get();
assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1"));
@@ -680,7 +676,7 @@ public void testGetFieldsComplexField() throws Exception {
logger.info("checking post-flush retrieval");
- getResponse = client().prepareGet("my-index", "my-type", "1").setStoredFields(field).get();
+ getResponse = client().prepareGet("my-index", "1").setStoredFields(field).get();
assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1"));
@@ -891,7 +887,7 @@ protected void assertGetFieldNull(String index, String type, String docId, Strin
}
private GetResponse multiGetDocument(String index, String type, String docId, String field, @Nullable String routing) {
- MultiGetRequest.Item getItem = new MultiGetRequest.Item(index, type, docId).storedFields(field);
+ MultiGetRequest.Item getItem = new MultiGetRequest.Item(index, docId).storedFields(field);
if (routing != null) {
getItem.routing(routing);
}
@@ -902,7 +898,7 @@ private GetResponse multiGetDocument(String index, String type, String docId, St
}
private GetResponse getDocument(String index, String type, String docId, String field, @Nullable String routing) {
- GetRequestBuilder getRequestBuilder = client().prepareGet().setIndex(index).setType(type).setId(docId).setStoredFields(field);
+ GetRequestBuilder getRequestBuilder = client().prepareGet().setIndex(index).setId(docId).setStoredFields(field);
if (routing != null) {
getRequestBuilder.setRouting(routing);
}
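Multi-get items lose their type argument as well; per-item options such as stored fields and versions hang off `MultiGetRequest.Item` exactly as before. A minimal sketch, assuming a node `Client`; names are illustrative only:

```java
import org.opensearch.action.get.MultiGetRequest;
import org.opensearch.action.get.MultiGetResponse;
import org.opensearch.client.Client;

// Illustrative helper only: typeless multi-get with one plain item and one item carrying options.
final class TypelessMultiGet {
    static MultiGetResponse fetchTwo(Client client, String index) {
        return client.prepareMultiGet()
            .add(index, "1")                                                  // plain index/id pair
            .add(new MultiGetRequest.Item(index, "2").storedFields("field"))  // per-item options
            .get();
    }
}
```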
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java
index 84e1231a7b8b4..359d40e3b7b9f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java
@@ -224,7 +224,7 @@ public void testRequestPipelineAndFinalPipeline() {
index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
final IndexResponse response = index.get();
assertThat(response.status(), equalTo(RestStatus.CREATED));
- final GetRequestBuilder get = client().prepareGet("index", "_doc", "1");
+ final GetRequestBuilder get = client().prepareGet("index", "1");
final GetResponse getResponse = get.get();
assertTrue(getResponse.isExists());
final Map<String, Object> source = getResponse.getSourceAsMap();
@@ -252,7 +252,7 @@ public void testDefaultAndFinalPipeline() {
index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
final IndexResponse response = index.get();
assertThat(response.status(), equalTo(RestStatus.CREATED));
- final GetRequestBuilder get = client().prepareGet("index", "_doc", "1");
+ final GetRequestBuilder get = client().prepareGet("index", "1");
final GetResponse getResponse = get.get();
assertTrue(getResponse.isExists());
final Map source = getResponse.getSourceAsMap();
@@ -302,7 +302,7 @@ public void testDefaultAndFinalPipelineFromTemplates() {
index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
final IndexResponse response = index.get();
assertThat(response.status(), equalTo(RestStatus.CREATED));
- final GetRequestBuilder get = client().prepareGet("index", "_doc", "1");
+ final GetRequestBuilder get = client().prepareGet("index", "1");
final GetResponse getResponse = get.get();
assertTrue(getResponse.isExists());
final Map<String, Object> source = getResponse.getSourceAsMap();
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/ShardIndexingPressureSettingsIT.java b/server/src/internalClusterTest/java/org/opensearch/index/ShardIndexingPressureSettingsIT.java
index 5c0abf72c3bf0..1cdf8e702aafa 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/ShardIndexingPressureSettingsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/ShardIndexingPressureSettingsIT.java
@@ -403,6 +403,8 @@ public void testShardIndexingPressureEnforcedEnabledDisabledSetting() throws Exc
secondSuccessFuture = client(coordinatingOnlyNode).bulk(bulkRequest);
Thread.sleep(25);
+ waitForTwoOutstandingRequests(coordinatingShardTracker);
+
// This request breaches the threshold and hence will be rejected
expectThrows(OpenSearchRejectedExecutionException.class, () -> client(coordinatingOnlyNode).bulk(bulkRequest).actionGet());
@@ -636,6 +638,7 @@ public void testShardIndexingPressureLastSuccessfulSettingsUpdate() throws Excep
IndexingPressureService.class,
coordinatingOnlyNode
).getShardIndexingPressure().getShardIndexingPressureTracker(shardId);
+ waitForTwoOutstandingRequests(coordinatingShardTracker);
expectThrows(OpenSearchRejectedExecutionException.class, () -> client(coordinatingOnlyNode).bulk(bulkRequest).actionGet());
assertEquals(1, coordinatingShardTracker.getCoordinatingOperationTracker().getRejectionTracker().getTotalRejections());
assertEquals(
@@ -648,6 +651,7 @@ public void testShardIndexingPressureLastSuccessfulSettingsUpdate() throws Excep
ShardIndexingPressureTracker primaryShardTracker = internalCluster().getInstance(IndexingPressureService.class, primaryName)
.getShardIndexingPressure()
.getShardIndexingPressureTracker(shardId);
+ waitForTwoOutstandingRequests(primaryShardTracker);
expectThrows(OpenSearchRejectedExecutionException.class, () -> client(primaryName).bulk(bulkRequest).actionGet());
assertEquals(1, primaryShardTracker.getCoordinatingOperationTracker().getRejectionTracker().getTotalRejections());
assertEquals(
@@ -920,6 +924,12 @@ private String getCoordinatingOnlyNode() {
.getName();
}
+ private static void waitForTwoOutstandingRequests(ShardIndexingPressureTracker tracker) throws Exception {
+ assertBusy(
+ () -> { assertEquals(tracker.getCoordinatingOperationTracker().getPerformanceTracker().getTotalOutstandingRequests(), 2); }
+ );
+ }
+
private void restartCluster(Settings settings) throws Exception {
internalCluster().fullRestart(new InternalTestCluster.RestartCallback() {
@Override
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/engine/InternalEngineMergeIT.java b/server/src/internalClusterTest/java/org/opensearch/index/engine/InternalEngineMergeIT.java
index 47d7e974357d8..06ec4dc6d2812 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/engine/InternalEngineMergeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/engine/InternalEngineMergeIT.java
@@ -71,7 +71,6 @@ public void testMergesHappening() throws Exception {
for (int j = 0; j < numDocs; ++j) {
request.add(
Requests.indexRequest("test")
- .type("type1")
.id(Long.toString(id++))
.source(jsonBuilder().startObject().field("l", randomLong()).endObject())
);
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java
index 28d92909a7f93..cb01295ae734c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java
@@ -93,10 +93,10 @@ public void testConflictingDynamicMappingsBulk() {
assertTrue(bulkResponse.hasFailures());
}
- private static void assertMappingsHaveField(GetMappingsResponse mappings, String index, String type, String field) throws IOException {
+ private static void assertMappingsHaveField(GetMappingsResponse mappings, String index, String field) throws IOException {
ImmutableOpenMap<String, MappingMetadata> indexMappings = mappings.getMappings().get("index");
assertNotNull(indexMappings);
- MappingMetadata typeMappings = indexMappings.get(type);
+ MappingMetadata typeMappings = indexMappings.get(MapperService.SINGLE_MAPPING_NAME);
assertNotNull(typeMappings);
Map<String, Object> typeMappingsMap = typeMappings.getSourceAsMap();
Map<String, Object> properties = (Map<String, Object>) typeMappingsMap.get("properties");
@@ -134,12 +134,12 @@ public void run() {
throw error.get();
}
Thread.sleep(2000);
- GetMappingsResponse mappings = client().admin().indices().prepareGetMappings("index").setTypes("type").get();
+ GetMappingsResponse mappings = client().admin().indices().prepareGetMappings("index").get();
for (int i = 0; i < indexThreads.length; ++i) {
- assertMappingsHaveField(mappings, "index", "type", "field" + i);
+ assertMappingsHaveField(mappings, "index", "field" + i);
}
for (int i = 0; i < indexThreads.length; ++i) {
- assertTrue(client().prepareGet("index", "type", Integer.toString(i)).get().isExists());
+ assertTrue(client().prepareGet("index", Integer.toString(i)).get().isExists());
}
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/seqno/RetentionLeaseIT.java b/server/src/internalClusterTest/java/org/opensearch/index/seqno/RetentionLeaseIT.java
index e1c56129c9f4b..ed6074b39c8a7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/seqno/RetentionLeaseIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/seqno/RetentionLeaseIT.java
@@ -43,7 +43,6 @@
import org.opensearch.common.unit.TimeValue;
import org.opensearch.index.IndexService;
import org.opensearch.index.IndexSettings;
-import org.opensearch.index.engine.Engine;
import org.opensearch.index.shard.IndexShard;
import org.opensearch.index.shard.ShardId;
import org.opensearch.indices.IndicesService;
@@ -122,7 +121,7 @@ public void testRetentionLeasesSyncedOnAdd() throws Exception {
final CountDownLatch latch = new CountDownLatch(1);
final ActionListener<ReplicationResponse> listener = countDownLatchListener(latch);
// simulate a peer recovery which locks the soft deletes policy on the primary
- final Closeable retentionLock = randomBoolean() ? primary.acquireHistoryRetentionLock(Engine.HistorySource.INDEX) : () -> {};
+ final Closeable retentionLock = randomBoolean() ? primary.acquireHistoryRetentionLock() : () -> {};
currentRetentionLeases.put(id, primary.addRetentionLease(id, retainingSequenceNumber, source, listener));
latch.await();
retentionLock.close();
@@ -175,7 +174,7 @@ public void testRetentionLeaseSyncedOnRemove() throws Exception {
final CountDownLatch latch = new CountDownLatch(1);
final ActionListener<ReplicationResponse> listener = countDownLatchListener(latch);
// simulate a peer recovery which locks the soft deletes policy on the primary
- final Closeable retentionLock = randomBoolean() ? primary.acquireHistoryRetentionLock(Engine.HistorySource.INDEX) : () -> {};
+ final Closeable retentionLock = randomBoolean() ? primary.acquireHistoryRetentionLock() : () -> {};
currentRetentionLeases.put(id, primary.addRetentionLease(id, retainingSequenceNumber, source, listener));
latch.await();
retentionLock.close();
@@ -186,7 +185,7 @@ public void testRetentionLeaseSyncedOnRemove() throws Exception {
final CountDownLatch latch = new CountDownLatch(1);
primary.removeRetentionLease(id, countDownLatchListener(latch));
// simulate a peer recovery which locks the soft deletes policy on the primary
- final Closeable retentionLock = randomBoolean() ? primary.acquireHistoryRetentionLock(Engine.HistorySource.INDEX) : () -> {};
+ final Closeable retentionLock = randomBoolean() ? primary.acquireHistoryRetentionLock() : () -> {};
currentRetentionLeases.remove(id);
latch.await();
retentionLock.close();
@@ -346,22 +345,16 @@ public void testBackgroundRetentionLeaseSync() throws Exception {
)
);
}
- assertBusy(
- () -> {
- // check all retention leases have been synced to all replicas
- for (final ShardRouting replicaShard : clusterService().state()
- .routingTable()
- .index("index")
- .shard(0)
- .replicaShards()) {
- final String replicaShardNodeId = replicaShard.currentNodeId();
- final String replicaShardNodeName = clusterService().state().nodes().get(replicaShardNodeId).getName();
- final IndexShard replica = internalCluster().getInstance(IndicesService.class, replicaShardNodeName)
- .getShardOrNull(new ShardId(resolveIndex("index"), 0));
- assertThat(replica.getRetentionLeases(), equalTo(primary.getRetentionLeases()));
- }
+ assertBusy(() -> {
+ // check all retention leases have been synced to all replicas
+ for (final ShardRouting replicaShard : clusterService().state().routingTable().index("index").shard(0).replicaShards()) {
+ final String replicaShardNodeId = replicaShard.currentNodeId();
+ final String replicaShardNodeName = clusterService().state().nodes().get(replicaShardNodeId).getName();
+ final IndexShard replica = internalCluster().getInstance(IndicesService.class, replicaShardNodeName)
+ .getShardOrNull(new ShardId(resolveIndex("index"), 0));
+ assertThat(replica.getRetentionLeases(), equalTo(primary.getRetentionLeases()));
}
- );
+ });
}
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
index ff234694a441c..fa0fcfdbea628 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
@@ -36,9 +36,6 @@
import org.opensearch.ExceptionsHelper;
import org.opensearch.Version;
import org.opensearch.action.ActionListener;
-import org.opensearch.action.admin.cluster.node.stats.NodeStats;
-import org.opensearch.action.admin.cluster.node.stats.NodesStatsResponse;
-import org.opensearch.action.admin.indices.stats.IndexStats;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchResponse;
@@ -58,7 +55,6 @@
import org.opensearch.common.CheckedRunnable;
import org.opensearch.common.Strings;
import org.opensearch.common.UUIDs;
-import org.opensearch.common.breaker.CircuitBreaker;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.lucene.uid.Versions;
import org.opensearch.common.settings.Settings;
@@ -77,7 +73,6 @@
import org.opensearch.index.engine.CommitStats;
import org.opensearch.index.engine.Engine;
import org.opensearch.index.engine.NoOpEngine;
-import org.opensearch.index.engine.SegmentsStats;
import org.opensearch.index.flush.FlushStats;
import org.opensearch.index.mapper.SourceToParse;
import org.opensearch.index.seqno.RetentionLeaseSyncer;
@@ -87,11 +82,9 @@
import org.opensearch.index.translog.TranslogStats;
import org.opensearch.indices.IndicesService;
import org.opensearch.indices.breaker.CircuitBreakerService;
-import org.opensearch.indices.breaker.CircuitBreakerStats;
import org.opensearch.indices.recovery.RecoveryState;
import org.opensearch.indices.replication.checkpoint.TransportCheckpointPublisher;
import org.opensearch.plugins.Plugin;
-import org.opensearch.search.aggregations.AggregationBuilders;
import org.opensearch.search.builder.SearchSourceBuilder;
import org.opensearch.test.DummyShardLock;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
@@ -124,7 +117,6 @@
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiLettersOfLength;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
-import static org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.BREAKER;
import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.opensearch.action.support.WriteRequest.RefreshPolicy.NONE;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
@@ -137,13 +129,11 @@
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.allOf;
-import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.either;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.hamcrest.Matchers.notNullValue;
public class IndexShardIT extends OpenSearchSingleNodeTestCase {
@@ -180,24 +170,6 @@ public void testLockTryingToDelete() throws Exception {
}
}
- public void testMarkAsInactiveTriggersSyncedFlush() throws Exception {
- assertAcked(
- client().admin()
- .indices()
- .prepareCreate("test")
- .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))
- );
- client().prepareIndex("test", "test").setSource("{}", XContentType.JSON).get();
- ensureGreen("test");
- IndicesService indicesService = getInstanceFromNode(IndicesService.class);
- indicesService.indexService(resolveIndex("test")).getShardOrNull(0).checkIdle(0);
- assertBusy(() -> {
- IndexStats indexStats = client().admin().indices().prepareStats("test").clear().setTranslog(true).get().getIndex("test");
- assertThat(indexStats.getTotal().translog.getUncommittedOperations(), equalTo(0));
- indicesService.indexService(resolveIndex("test")).getShardOrNull(0).checkIdle(0);
- });
- }
-
public void testDurableFlagHasEffect() throws Exception {
createIndex("test");
ensureGreen();
@@ -663,86 +635,6 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResul
}
}
- /** Check that the accounting breaker correctly matches the segments API for memory usage */
- private void checkAccountingBreaker() {
- CircuitBreakerService breakerService = getInstanceFromNode(CircuitBreakerService.class);
- CircuitBreaker acctBreaker = breakerService.getBreaker(CircuitBreaker.ACCOUNTING);
- long usedMem = acctBreaker.getUsed();
- assertThat(usedMem, greaterThan(0L));
- NodesStatsResponse response = client().admin().cluster().prepareNodesStats().setIndices(true).addMetric(BREAKER.metricName()).get();
- NodeStats stats = response.getNodes().get(0);
- assertNotNull(stats);
- SegmentsStats segmentsStats = stats.getIndices().getSegments();
- CircuitBreakerStats breakerStats = stats.getBreaker().getStats(CircuitBreaker.ACCOUNTING);
- assertEquals(usedMem, segmentsStats.getMemoryInBytes());
- assertEquals(usedMem, breakerStats.getEstimated());
- }
-
- public void testCircuitBreakerIncrementedByIndexShard() throws Exception {
- client().admin()
- .cluster()
- .prepareUpdateSettings()
- .setTransientSettings(Settings.builder().put("network.breaker.inflight_requests.overhead", 0.0))
- .get();
-
- // Generate a couple of segments
- client().prepareIndex("test", "_doc", "1")
- .setSource("{\"foo\":\"" + randomAlphaOfLength(100) + "\"}", XContentType.JSON)
- .setRefreshPolicy(IMMEDIATE)
- .get();
- // Use routing so 2 documents are guaranteed to be on the same shard
- String routing = randomAlphaOfLength(5);
- client().prepareIndex("test", "_doc", "2")
- .setSource("{\"foo\":\"" + randomAlphaOfLength(100) + "\"}", XContentType.JSON)
- .setRefreshPolicy(IMMEDIATE)
- .setRouting(routing)
- .get();
- client().prepareIndex("test", "_doc", "3")
- .setSource("{\"foo\":\"" + randomAlphaOfLength(100) + "\"}", XContentType.JSON)
- .setRefreshPolicy(IMMEDIATE)
- .setRouting(routing)
- .get();
-
- checkAccountingBreaker();
- // Test that force merging causes the breaker to be correctly adjusted
- logger.info("--> force merging to a single segment");
- client().admin().indices().prepareForceMerge("test").setMaxNumSegments(1).setFlush(randomBoolean()).get();
- client().admin().indices().prepareRefresh().get();
- checkAccountingBreaker();
-
- client().admin()
- .cluster()
- .prepareUpdateSettings()
- .setTransientSettings(Settings.builder().put("indices.breaker.total.limit", "1kb"))
- .get();
-
- // Test that we're now above the parent limit due to the segments
- Exception e = expectThrows(
- Exception.class,
- () -> client().prepareSearch("test").addAggregation(AggregationBuilders.terms("foo_terms").field("foo.keyword")).get()
- );
- logger.info("--> got an expected exception", e);
- assertThat(e.getCause(), notNullValue());
- assertThat(e.getCause().getMessage(), containsString("[parent] Data too large, data for []"));
-
- client().admin()
- .cluster()
- .prepareUpdateSettings()
- .setTransientSettings(
- Settings.builder().putNull("indices.breaker.total.limit").putNull("network.breaker.inflight_requests.overhead")
- )
- .get();
-
- // Test that deleting the index causes the breaker to correctly be decremented
- logger.info("--> deleting index");
- client().admin().indices().prepareDelete("test").get();
-
- // Accounting breaker should now be 0
- CircuitBreakerService breakerService = getInstanceFromNode(CircuitBreakerService.class);
- CircuitBreaker acctBreaker = breakerService.getBreaker(CircuitBreaker.ACCOUNTING);
- assertThat(acctBreaker.getUsed(), equalTo(0L));
- }
-
public static final IndexShard recoverShard(IndexShard newShard) throws IOException {
DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
@@ -912,7 +804,6 @@ public void testLimitNumberOfRetainedTranslogFiles() throws Exception {
Settings.Builder settings = Settings.builder()
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put(SETTING_NUMBER_OF_REPLICAS, 0)
- .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false)
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_TOTAL_FILES_SETTING.getKey(), translogRetentionTotalFiles);
if (randomBoolean()) {
settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), new ByteSizeValue(between(1, 1024 * 1024)));
diff --git a/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java b/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java
index ffc8e74875c58..3819b42e799ed 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java
@@ -99,7 +99,7 @@ public void testAutoGenerateIdNoDuplicates() throws Exception {
}
try {
logger.debug("running search with a specific type");
- SearchResponse response = client().prepareSearch("test").setTypes("type").get();
+ SearchResponse response = client().prepareSearch("test").get();
if (response.getHits().getTotalHits().value != numOfDocs) {
final String message = "Count is "
+ response.getHits().getTotalHits().value
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java
index ec90d271b9127..19e1e196daad0 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java
@@ -80,22 +80,22 @@ public void testIndexNameDateMathExpressions() {
assertHitCount(searchResponse, 3);
assertSearchHits(searchResponse, "1", "2", "3");
- GetResponse getResponse = client().prepareGet(dateMathExp1, "type", "1").get();
+ GetResponse getResponse = client().prepareGet(dateMathExp1, "1").get();
assertThat(getResponse.isExists(), is(true));
assertThat(getResponse.getId(), equalTo("1"));
- getResponse = client().prepareGet(dateMathExp2, "type", "2").get();
+ getResponse = client().prepareGet(dateMathExp2, "2").get();
assertThat(getResponse.isExists(), is(true));
assertThat(getResponse.getId(), equalTo("2"));
- getResponse = client().prepareGet(dateMathExp3, "type", "3").get();
+ getResponse = client().prepareGet(dateMathExp3, "3").get();
assertThat(getResponse.isExists(), is(true));
assertThat(getResponse.getId(), equalTo("3"));
MultiGetResponse mgetResponse = client().prepareMultiGet()
- .add(dateMathExp1, "type", "1")
- .add(dateMathExp2, "type", "2")
- .add(dateMathExp3, "type", "3")
+ .add(dateMathExp1, "1")
+ .add(dateMathExp2, "2")
+ .add(dateMathExp3, "3")
.get();
assertThat(mgetResponse.getResponses()[0].getResponse().isExists(), is(true));
assertThat(mgetResponse.getResponses()[0].getResponse().getId(), equalTo("1"));
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/flush/FlushIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/flush/FlushIT.java
deleted file mode 100644
index 277e83fa51379..0000000000000
--- a/server/src/internalClusterTest/java/org/opensearch/indices/flush/FlushIT.java
+++ /dev/null
@@ -1,495 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.indices.flush;
-
-import org.apache.lucene.index.Term;
-import org.opensearch.action.ActionListener;
-import org.opensearch.action.admin.indices.flush.FlushRequest;
-import org.opensearch.action.admin.indices.flush.FlushResponse;
-import org.opensearch.action.admin.indices.flush.SyncedFlushResponse;
-import org.opensearch.action.admin.indices.stats.IndexStats;
-import org.opensearch.action.admin.indices.stats.ShardStats;
-import org.opensearch.action.support.ActiveShardCount;
-import org.opensearch.cluster.ClusterState;
-import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.cluster.routing.ShardRouting;
-import org.opensearch.cluster.routing.allocation.command.MoveAllocationCommand;
-import org.opensearch.common.UUIDs;
-import org.opensearch.common.ValidationException;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
-import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.index.Index;
-import org.opensearch.index.IndexService;
-import org.opensearch.index.IndexSettings;
-import org.opensearch.index.engine.Engine;
-import org.opensearch.index.engine.InternalEngine;
-import org.opensearch.index.engine.InternalEngineTests;
-import org.opensearch.index.mapper.ParsedDocument;
-import org.opensearch.index.mapper.Uid;
-import org.opensearch.index.seqno.SequenceNumbers;
-import org.opensearch.index.shard.IndexShard;
-import org.opensearch.index.shard.IndexShardTestCase;
-import org.opensearch.index.shard.ShardId;
-import org.opensearch.indices.IndexingMemoryController;
-import org.opensearch.indices.IndicesService;
-import org.opensearch.plugins.Plugin;
-import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.InternalSettingsPlugin;
-import org.opensearch.test.InternalTestCluster;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.stream.Collectors;
-
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.emptyArray;
-import static org.hamcrest.Matchers.emptyIterable;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.not;
-import static org.hamcrest.Matchers.nullValue;
-
-@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST)
-public class FlushIT extends OpenSearchIntegTestCase {
-
- @Override
- protected Collection<Class<? extends Plugin>> nodePlugins() {
- return Collections.singletonList(InternalSettingsPlugin.class);
- }
-
- public void testWaitIfOngoing() throws InterruptedException {
- createIndex("test");
- ensureGreen("test");
- final int numIters = scaledRandomIntBetween(10, 30);
- for (int i = 0; i < numIters; i++) {
- for (int j = 0; j < 10; j++) {
- client().prepareIndex("test", "test").setSource("{}", XContentType.JSON).get();
- }
- final CountDownLatch latch = new CountDownLatch(10);
- final CopyOnWriteArrayList<Throwable> errors = new CopyOnWriteArrayList<>();
- for (int j = 0; j < 10; j++) {
- client().admin().indices().prepareFlush("test").execute(new ActionListener<FlushResponse>() {
- @Override
- public void onResponse(FlushResponse flushResponse) {
- try {
- // don't use assertAllSuccessful it uses a randomized context that belongs to a different thread
- assertThat(
- "Unexpected ShardFailures: " + Arrays.toString(flushResponse.getShardFailures()),
- flushResponse.getFailedShards(),
- equalTo(0)
- );
- latch.countDown();
- } catch (Exception ex) {
- onFailure(ex);
- }
-
- }
-
- @Override
- public void onFailure(Exception e) {
- errors.add(e);
- latch.countDown();
- }
- });
- }
- latch.await();
- assertThat(errors, emptyIterable());
- }
- }
-
- public void testRejectIllegalFlushParameters() {
- createIndex("test");
- int numDocs = randomIntBetween(0, 10);
- for (int i = 0; i < numDocs; i++) {
- client().prepareIndex("test", "_doc").setSource("{}", XContentType.JSON).get();
- }
- assertThat(
- expectThrows(
- ValidationException.class,
- () -> client().admin().indices().flush(new FlushRequest().force(true).waitIfOngoing(false)).actionGet()
- ).getMessage(),
- containsString("wait_if_ongoing must be true for a force flush")
- );
- assertThat(
- client().admin().indices().flush(new FlushRequest().force(true).waitIfOngoing(true)).actionGet().getShardFailures(),
- emptyArray()
- );
- assertThat(
- client().admin().indices().flush(new FlushRequest().force(false).waitIfOngoing(randomBoolean())).actionGet().getShardFailures(),
- emptyArray()
- );
- }
-
- public void testSyncedFlush() throws Exception {
- internalCluster().ensureAtLeastNumDataNodes(2);
- prepareCreate("test").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)).get();
- ensureGreen();
-
- final Index index = client().admin().cluster().prepareState().get().getState().metadata().index("test").getIndex();
-
- IndexStats indexStats = client().admin().indices().prepareStats("test").get().getIndex("test");
- for (ShardStats shardStats : indexStats.getShards()) {
- assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
- }
-
- ShardsSyncedFlushResult result;
- if (randomBoolean()) {
- logger.info("--> sync flushing shard 0");
- result = SyncedFlushUtil.attemptSyncedFlush(logger, internalCluster(), new ShardId(index, 0));
- } else {
- logger.info("--> sync flushing index [test]");
- SyncedFlushResponse indicesResult = client().admin().indices().prepareSyncedFlush("test").get();
- result = indicesResult.getShardsResultPerIndex().get("test").get(0);
- }
- assertFalse(result.failed());
- assertThat(result.totalShards(), equalTo(indexStats.getShards().length));
- assertThat(result.successfulShards(), equalTo(indexStats.getShards().length));
-
- indexStats = client().admin().indices().prepareStats("test").get().getIndex("test");
- String syncId = result.syncId();
- for (ShardStats shardStats : indexStats.getShards()) {
- final String shardSyncId = shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID);
- assertThat(shardSyncId, equalTo(syncId));
- }
-
- // now, start new node and relocate a shard there and see if sync id still there
- String newNodeName = internalCluster().startNode();
- ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
- ShardRouting shardRouting = clusterState.getRoutingTable().index("test").shard(0).iterator().next();
- String currentNodeName = clusterState.nodes().resolveNode(shardRouting.currentNodeId()).getName();
- assertFalse(currentNodeName.equals(newNodeName));
- internalCluster().client()
- .admin()
- .cluster()
- .prepareReroute()
- .add(new MoveAllocationCommand("test", 0, currentNodeName, newNodeName))
- .get();
-
- client().admin().cluster().prepareHealth().setWaitForNoRelocatingShards(true).get();
- indexStats = client().admin().indices().prepareStats("test").get().getIndex("test");
- for (ShardStats shardStats : indexStats.getShards()) {
- assertNotNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
- }
-
- client().admin()
- .indices()
- .prepareUpdateSettings("test")
- .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build())
- .get();
- ensureGreen("test");
- indexStats = client().admin().indices().prepareStats("test").get().getIndex("test");
- for (ShardStats shardStats : indexStats.getShards()) {
- assertNotNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
- }
- client().admin()
- .indices()
- .prepareUpdateSettings("test")
- .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, internalCluster().numDataNodes() - 1).build())
- .get();
- ensureGreen("test");
- indexStats = client().admin().indices().prepareStats("test").get().getIndex("test");
- for (ShardStats shardStats : indexStats.getShards()) {
- assertNotNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
- }
- }
-
- public void testSyncedFlushWithConcurrentIndexing() throws Exception {
-
- internalCluster().ensureAtLeastNumDataNodes(3);
- createIndex("test");
-
- client().admin()
- .indices()
- .prepareUpdateSettings("test")
- .setSettings(
- Settings.builder()
- .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB))
- .put("index.refresh_interval", -1)
- .put("index.number_of_replicas", internalCluster().numDataNodes() - 1)
- )
- .get();
- ensureGreen();
- final AtomicBoolean stop = new AtomicBoolean(false);
- final AtomicInteger numDocs = new AtomicInteger(0);
- Thread indexingThread = new Thread() {
- @Override
- public void run() {
- while (stop.get() == false) {
- client().prepareIndex().setIndex("test").setType("_doc").setSource("{}", XContentType.JSON).get();
- numDocs.incrementAndGet();
- }
- }
- };
- indexingThread.start();
-
- IndexStats indexStats = client().admin().indices().prepareStats("test").get().getIndex("test");
- for (ShardStats shardStats : indexStats.getShards()) {
- assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
- }
- logger.info("--> trying sync flush");
- SyncedFlushResponse syncedFlushResult = client().admin().indices().prepareSyncedFlush("test").get();
- logger.info("--> sync flush done");
- stop.set(true);
- indexingThread.join();
- indexStats = client().admin().indices().prepareStats("test").get().getIndex("test");
- assertFlushResponseEqualsShardStats(indexStats.getShards(), syncedFlushResult.getShardsResultPerIndex().get("test"));
- refresh();
- assertThat(client().prepareSearch().setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs.get()));
- logger.info("indexed {} docs", client().prepareSearch().setSize(0).get().getHits().getTotalHits().value);
- logClusterState();
- internalCluster().fullRestart();
- ensureGreen();
- assertThat(client().prepareSearch().setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs.get()));
- }
-
- private void assertFlushResponseEqualsShardStats(ShardStats[] shardsStats, List<ShardsSyncedFlushResult> syncedFlushResults) {
-
- for (final ShardStats shardStats : shardsStats) {
- for (final ShardsSyncedFlushResult shardResult : syncedFlushResults) {
- if (shardStats.getShardRouting().getId() == shardResult.shardId().getId()) {
- for (Map.Entry<ShardRouting, SyncedFlushService.ShardSyncedFlushResponse> singleResponse : shardResult.shardResponses()
- .entrySet()) {
- if (singleResponse.getKey().currentNodeId().equals(shardStats.getShardRouting().currentNodeId())) {
- if (singleResponse.getValue().success()) {
- logger.info(
- "{} sync flushed on node {}",
- singleResponse.getKey().shardId(),
- singleResponse.getKey().currentNodeId()
- );
- assertNotNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
- } else {
- logger.info(
- "{} sync flush failed for on node {}",
- singleResponse.getKey().shardId(),
- singleResponse.getKey().currentNodeId()
- );
- assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
- }
- }
- }
- }
- }
- }
- }
-
- public void testUnallocatedShardsDoesNotHang() throws InterruptedException {
- // create an index but disallow allocation
- prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE)
- .setSettings(Settings.builder().put("index.routing.allocation.include._name", "nonexistent"))
- .get();
-
- // this should not hang but instead immediately return with empty result set
- List<ShardsSyncedFlushResult> shardsResult = client().admin()
- .indices()
- .prepareSyncedFlush("test")
- .get()
- .getShardsResultPerIndex()
- .get("test");
- // just to make sure the test actually tests the right thing
- int numShards = client().admin()
- .indices()
- .prepareGetSettings("test")
- .get()
- .getIndexToSettings()
- .get("test")
- .getAsInt(IndexMetadata.SETTING_NUMBER_OF_SHARDS, -1);
- assertThat(shardsResult.size(), equalTo(numShards));
- assertThat(shardsResult.get(0).failureReason(), equalTo("no active shards"));
- }
-
- private void indexDoc(Engine engine, String id) throws IOException {
- final ParsedDocument doc = InternalEngineTests.createParsedDoc(id, null);
- final Engine.IndexResult indexResult = engine.index(
- new Engine.Index(
- new Term("_id", Uid.encodeId(doc.id())),
- doc,
- ((InternalEngine) engine).getProcessedLocalCheckpoint() + 1,
- 1L,
- 1L,
- null,
- Engine.Operation.Origin.REPLICA,
- System.nanoTime(),
- -1L,
- false,
- SequenceNumbers.UNASSIGNED_SEQ_NO,
- 0
- )
- );
- assertThat(indexResult.getFailure(), nullValue());
- engine.syncTranslog();
- }
-
- public void testSyncedFlushSkipOutOfSyncReplicas() throws Exception {
- internalCluster().ensureAtLeastNumDataNodes(between(2, 3));
- final int numberOfReplicas = internalCluster().numDataNodes() - 1;
- assertAcked(
- prepareCreate("test").setSettings(
- Settings.builder()
- .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
- .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numberOfReplicas)
- ).get()
- );
- ensureGreen();
- final Index index = clusterService().state().metadata().index("test").getIndex();
- final ShardId shardId = new ShardId(index, 0);
- final int numDocs = between(1, 10);
- for (int i = 0; i < numDocs; i++) {
- index("test", "doc", Integer.toString(i));
- }
- final List<IndexShard> indexShards = internalCluster().nodesInclude("test")
- .stream()
- .map(node -> internalCluster().getInstance(IndicesService.class, node).getShardOrNull(shardId))
- .collect(Collectors.toList());
- // Index extra documents to one replica - synced-flush should fail on that replica.
- final IndexShard outOfSyncReplica = randomValueOtherThanMany(s -> s.routingEntry().primary(), () -> randomFrom(indexShards));
- final int extraDocs = between(1, 10);
- for (int i = 0; i < extraDocs; i++) {
- indexDoc(IndexShardTestCase.getEngine(outOfSyncReplica), "extra_" + i);
- }
- final ShardsSyncedFlushResult partialResult = SyncedFlushUtil.attemptSyncedFlush(logger, internalCluster(), shardId);
- assertThat(partialResult.totalShards(), equalTo(numberOfReplicas + 1));
- assertThat(partialResult.successfulShards(), equalTo(numberOfReplicas));
- assertThat(
- partialResult.shardResponses().get(outOfSyncReplica.routingEntry()).failureReason,
- equalTo(
- "ongoing indexing operations: num docs on replica [" + (numDocs + extraDocs) + "]; num docs on primary [" + numDocs + "]"
- )
- );
- // Index extra documents to all shards - synced-flush should be ok.
- for (IndexShard indexShard : indexShards) {
- // Do reindex documents to the out of sync replica to avoid trigger merges
- if (indexShard != outOfSyncReplica) {
- for (int i = 0; i < extraDocs; i++) {
- indexDoc(IndexShardTestCase.getEngine(indexShard), "extra_" + i);
- }
- }
- }
- final ShardsSyncedFlushResult fullResult = SyncedFlushUtil.attemptSyncedFlush(logger, internalCluster(), shardId);
- assertThat(fullResult.totalShards(), equalTo(numberOfReplicas + 1));
- assertThat(fullResult.successfulShards(), equalTo(numberOfReplicas + 1));
- }
-
- public void testDoNotRenewSyncedFlushWhenAllSealed() throws Exception {
- internalCluster().ensureAtLeastNumDataNodes(between(2, 3));
- final int numberOfReplicas = internalCluster().numDataNodes() - 1;
- assertAcked(
- prepareCreate("test").setSettings(
- Settings.builder()
- .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
- .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numberOfReplicas)
- ).get()
- );
- ensureGreen();
- final Index index = clusterService().state().metadata().index("test").getIndex();
- final ShardId shardId = new ShardId(index, 0);
- final int numDocs = between(1, 10);
- for (int i = 0; i < numDocs; i++) {
- index("test", "doc", Integer.toString(i));
- }
- final ShardsSyncedFlushResult firstSeal = SyncedFlushUtil.attemptSyncedFlush(logger, internalCluster(), shardId);
- assertThat(firstSeal.successfulShards(), equalTo(numberOfReplicas + 1));
- // Do not renew synced-flush
- final ShardsSyncedFlushResult secondSeal = SyncedFlushUtil.attemptSyncedFlush(logger, internalCluster(), shardId);
- assertThat(secondSeal.successfulShards(), equalTo(numberOfReplicas + 1));
- assertThat(secondSeal.syncId(), equalTo(firstSeal.syncId()));
- // Shards were updated, renew synced flush.
- final int moreDocs = between(1, 10);
- for (int i = 0; i < moreDocs; i++) {
- index("test", "doc", "more-" + i);
- }
- final ShardsSyncedFlushResult thirdSeal = SyncedFlushUtil.attemptSyncedFlush(logger, internalCluster(), shardId);
- assertThat(thirdSeal.successfulShards(), equalTo(numberOfReplicas + 1));
- assertThat(thirdSeal.syncId(), not(equalTo(firstSeal.syncId())));
- // Manually remove or change sync-id, renew synced flush.
- IndexShard shard = internalCluster().getInstance(IndicesService.class, randomFrom(internalCluster().nodesInclude("test")))
- .getShardOrNull(shardId);
- if (randomBoolean()) {
- // Change the existing sync-id of a single shard.
- shard.syncFlush(UUIDs.randomBase64UUID(random()), shard.commitStats().getRawCommitId());
- assertThat(shard.commitStats().syncId(), not(equalTo(thirdSeal.syncId())));
- } else {
- // Flush will create a new commit without sync-id
- shard.flush(new FlushRequest(shardId.getIndexName()).force(true).waitIfOngoing(true));
- assertThat(shard.commitStats().syncId(), nullValue());
- }
- final ShardsSyncedFlushResult forthSeal = SyncedFlushUtil.attemptSyncedFlush(logger, internalCluster(), shardId);
- assertThat(forthSeal.successfulShards(), equalTo(numberOfReplicas + 1));
- assertThat(forthSeal.syncId(), not(equalTo(thirdSeal.syncId())));
- }
-
- public void testFlushOnInactive() throws Exception {
- final String indexName = "flush_on_inactive";
- List<String> dataNodes = internalCluster().startDataOnlyNodes(
- 2,
- Settings.builder().put(IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.getKey(), randomTimeValue(10, 1000, "ms")).build()
- );
- assertAcked(
- client().admin()
- .indices()
- .prepareCreate(indexName)
- .setSettings(
- Settings.builder()
- .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
- .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
- .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), randomTimeValue(200, 500, "ms"))
- .put(IndexService.GLOBAL_CHECKPOINT_SYNC_INTERVAL_SETTING.getKey(), randomTimeValue(50, 200, "ms"))
- .put("index.routing.allocation.include._name", String.join(",", dataNodes))
- .build()
- )
- );
- ensureGreen(indexName);
- int numDocs = randomIntBetween(1, 10);
- for (int i = 0; i < numDocs; i++) {
- client().prepareIndex(indexName, "_doc").setSource("f", "v").get();
- }
- if (randomBoolean()) {
- internalCluster().restartNode(randomFrom(dataNodes), new InternalTestCluster.RestartCallback());
- ensureGreen(indexName);
- }
- assertBusy(() -> {
- for (ShardStats shardStats : client().admin().indices().prepareStats(indexName).get().getShards()) {
- assertThat(shardStats.getStats().getTranslog().getUncommittedOperations(), equalTo(0));
- }
- }, 30, TimeUnit.SECONDS);
- }
-}
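With FlushIT gone, the synced-flush client API it exercised (prepareSyncedFlush and the sync-id commit metadata) has no remaining callers here; the surviving tests, such as the CloseIndexIT hunk further below, issue an ordinary flush instead. A minimal sketch of that replacement, using only the flush types already imported by the deleted test; the helper name is illustrative:

// Plain flush in place of a synced flush; wait_if_ongoing must be true whenever force is true.
static void flushInsteadOfSyncedFlush(org.opensearch.client.Client client, String index) {
    org.opensearch.action.admin.indices.flush.FlushRequest request =
        new org.opensearch.action.admin.indices.flush.FlushRequest(index).force(false).waitIfOngoing(true);
    org.opensearch.action.admin.indices.flush.FlushResponse response = client.admin().indices().flush(request).actionGet();
    if (response.getFailedShards() != 0) {
        throw new AssertionError("flush failed on " + response.getFailedShards() + " shard(s)");
    }
}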
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java
index 5ad516a6514fb..0afe067afb686 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java
@@ -130,7 +130,7 @@ public void testDynamicUpdates() throws Exception {
for (int rec = 0; rec < recCount; rec++) {
String type = "type";
String fieldName = "field_" + type + "_" + rec;
- assertConcreteMappingsOnAll("test", type, fieldName);
+ assertConcreteMappingsOnAll("test", fieldName);
}
client().admin()
@@ -377,7 +377,7 @@ public void testPutMappingsWithBlocks() {
* Waits until mappings for the provided fields exist on all nodes. Note, this waits for the current
* started shards and checks for concrete mappings.
*/
- private void assertConcreteMappingsOnAll(final String index, final String type, final String... fieldNames) {
+ private void assertConcreteMappingsOnAll(final String index, final String... fieldNames) {
Set<String> nodes = internalCluster().nodesInclude(index);
assertThat(nodes, Matchers.not(Matchers.emptyIterable()));
for (String node : nodes) {
@@ -390,17 +390,17 @@ private void assertConcreteMappingsOnAll(final String index, final String type,
assertNotNull("field " + fieldName + " doesn't exists on " + node, fieldType);
}
}
- assertMappingOnMaster(index, type, fieldNames);
+ assertMappingOnMaster(index, fieldNames);
}
/**
* Waits for the given mapping type to exists on the master node.
*/
- private void assertMappingOnMaster(final String index, final String type, final String... fieldNames) {
- GetMappingsResponse response = client().admin().indices().prepareGetMappings(index).setTypes(type).get();
+ private void assertMappingOnMaster(final String index, final String... fieldNames) {
+ GetMappingsResponse response = client().admin().indices().prepareGetMappings(index).get();
ImmutableOpenMap<String, MappingMetadata> mappings = response.getMappings().get(index);
assertThat(mappings, notNullValue());
- MappingMetadata mappingMetadata = mappings.get(type);
+ MappingMetadata mappingMetadata = mappings.get(MapperService.SINGLE_MAPPING_NAME);
assertThat(mappingMetadata, notNullValue());
Map<String, Object> mappingSource = mappingMetadata.getSourceAsMap();
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
index 2c72379e6b711..0772bc2965c4c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
@@ -102,8 +102,6 @@ private void reset() {
HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING,
HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING,
HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING,
- HierarchyCircuitBreakerService.ACCOUNTING_CIRCUIT_BREAKER_LIMIT_SETTING,
- HierarchyCircuitBreakerService.ACCOUNTING_CIRCUIT_BREAKER_OVERHEAD_SETTING,
HierarchyCircuitBreakerService.IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_LIMIT_SETTING,
HierarchyCircuitBreakerService.IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_OVERHEAD_SETTING,
HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING
@@ -405,7 +403,7 @@ public void testLimitsRequestSize() {
int numRequests = inFlightRequestsLimit.bytesAsInt();
BulkRequest bulkRequest = new BulkRequest();
for (int i = 0; i < numRequests; i++) {
- IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i));
+ IndexRequest indexRequest = new IndexRequest("index").id(Integer.toString(i));
indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field", "value", "num", i);
bulkRequest.add(indexRequest);
}
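The hunk above shows the pattern this patch applies across the bulk and ingest tests: the three-argument IndexRequest(index, type, id) construction is replaced by the typeless new IndexRequest(index).id(id) builder chain. A short, hedged sketch of that style (the index name and field values here are illustrative):

import org.opensearch.action.bulk.BulkRequest;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.common.xcontent.XContentType;

public final class TypelessBulkExample {
    public static BulkRequest buildBulk() {
        BulkRequest bulk = new BulkRequest();
        for (int i = 0; i < 3; i++) {
            // Typeless request: index name and document id only, no mapping type argument.
            bulk.add(new IndexRequest("index").id(Integer.toString(i)).source(XContentType.JSON, "field", "value", "num", i));
        }
        return bulk;
    }
}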
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
index 52e7e7b55bbd6..3bab909d3b7f3 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
@@ -81,6 +81,7 @@
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.OpenSearchRejectedExecutionException;
import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.gateway.ReplicaShardAllocatorIT;
import org.opensearch.index.Index;
import org.opensearch.index.IndexService;
import org.opensearch.index.IndexSettings;
@@ -100,7 +101,6 @@
import org.opensearch.indices.IndicesService;
import org.opensearch.indices.NodeIndicesStats;
import org.opensearch.indices.analysis.AnalysisModule;
-import org.opensearch.indices.flush.SyncedFlushUtil;
import org.opensearch.indices.recovery.RecoveryState.Stage;
import org.opensearch.node.NodeClosedException;
import org.opensearch.node.RecoverySettingsChunkSizePlugin;
@@ -138,7 +138,6 @@
import java.util.Collections;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Semaphore;
@@ -148,7 +147,6 @@
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
-import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import static java.util.Collections.singletonMap;
@@ -403,7 +401,23 @@ public void testCancelNewShardRecoveryAndUsesExistingShardCopy() throws Exceptio
final String nodeA = internalCluster().startNode();
logger.info("--> create index on node: {}", nodeA);
- createAndPopulateIndex(INDEX_NAME, 1, SHARD_COUNT, REPLICA_COUNT).getShards()[0].getStats().getStore().size();
+ createIndex(
+ INDEX_NAME,
+ Settings.builder()
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
+ .put(IndexService.RETENTION_LEASE_SYNC_INTERVAL_SETTING.getKey(), "100ms")
+ .put(IndexService.GLOBAL_CHECKPOINT_SYNC_INTERVAL_SETTING.getKey(), "100ms")
+ .build()
+ );
+
+ int numDocs = randomIntBetween(10, 200);
+ final IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
+ for (int i = 0; i < numDocs; i++) {
+ docs[i] = client().prepareIndex(INDEX_NAME, INDEX_TYPE)
+ .setSource("foo-int", randomInt(), "foo-string", randomAlphaOfLength(32), "foo-float", randomFloat());
+ }
+ indexRandom(randomBoolean(), docs);
logger.info("--> start node B");
// force a shard recovery from nodeA to nodeB
@@ -425,8 +439,7 @@ public void testCancelNewShardRecoveryAndUsesExistingShardCopy() throws Exceptio
logger.info("--> start node C");
final String nodeC = internalCluster().startNode();
- // do sync flush to gen sync id
- assertThat(client().admin().indices().prepareSyncedFlush(INDEX_NAME).get().failedShards(), equalTo(0));
+ ReplicaShardAllocatorIT.ensureActivePeerRecoveryRetentionLeasesAdvanced(INDEX_NAME);
// hold peer recovery on phase 2 after nodeB down
CountDownLatch phase1ReadyBlocked = new CountDownLatch(1);
@@ -1524,93 +1537,6 @@ public void testOngoingRecoveryAndMasterFailOver() throws Exception {
ensureGreen(indexName);
}
- public void testRecoveryFlushReplica() throws Exception {
- internalCluster().ensureAtLeastNumDataNodes(3);
- String indexName = "test-index";
- createIndex(indexName, Settings.builder().put("index.number_of_replicas", 0).put("index.number_of_shards", 1).build());
- int numDocs = randomIntBetween(0, 10);
- indexRandom(
- randomBoolean(),
- false,
- randomBoolean(),
- IntStream.range(0, numDocs).mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)).collect(toList())
- );
- assertAcked(
- client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder().put("index.number_of_replicas", 1))
- );
- ensureGreen(indexName);
- ShardId shardId = null;
- for (ShardStats shardStats : client().admin().indices().prepareStats(indexName).get().getIndex(indexName).getShards()) {
- shardId = shardStats.getShardRouting().shardId();
- if (shardStats.getShardRouting().primary() == false) {
- assertThat(shardStats.getCommitStats().getNumDocs(), equalTo(numDocs));
- SequenceNumbers.CommitInfo commitInfo = SequenceNumbers.loadSeqNoInfoFromLuceneCommit(
- shardStats.getCommitStats().getUserData().entrySet()
- );
- assertThat(commitInfo.localCheckpoint, equalTo(shardStats.getSeqNoStats().getLocalCheckpoint()));
- assertThat(commitInfo.maxSeqNo, equalTo(shardStats.getSeqNoStats().getMaxSeqNo()));
- }
- }
- SyncedFlushUtil.attemptSyncedFlush(logger, internalCluster(), shardId);
- assertBusy(() -> assertThat(client().admin().indices().prepareSyncedFlush(indexName).get().failedShards(), equalTo(0)));
- assertAcked(
- client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder().put("index.number_of_replicas", 2))
- );
- ensureGreen(indexName);
- // Recovery should keep syncId if no indexing activity on the primary after synced-flush.
- Set<String> syncIds = Stream.of(client().admin().indices().prepareStats(indexName).get().getIndex(indexName).getShards())
- .map(shardStats -> shardStats.getCommitStats().syncId())
- .collect(Collectors.toSet());
- assertThat(syncIds, hasSize(1));
- }
-
- public void testRecoveryUsingSyncedFlushWithoutRetentionLease() throws Exception {
- internalCluster().ensureAtLeastNumDataNodes(2);
- String indexName = "test-index";
- createIndex(
- indexName,
- Settings.builder()
- .put("index.number_of_shards", 1)
- .put("index.number_of_replicas", 1)
- .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "24h") // do not reallocate the lost shard
- .put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING.getKey(), "100ms") // expire leases quickly
- .put(IndexService.RETENTION_LEASE_SYNC_INTERVAL_SETTING.getKey(), "100ms") // sync frequently
- .build()
- );
- int numDocs = randomIntBetween(0, 10);
- indexRandom(
- randomBoolean(),
- false,
- randomBoolean(),
- IntStream.range(0, numDocs).mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)).collect(toList())
- );
- ensureGreen(indexName);
-
- final ShardId shardId = new ShardId(resolveIndex(indexName), 0);
- assertThat(SyncedFlushUtil.attemptSyncedFlush(logger, internalCluster(), shardId).successfulShards(), equalTo(2));
-
- final ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
- final ShardRouting shardToResync = randomFrom(clusterState.routingTable().shardRoutingTable(shardId).activeShards());
- internalCluster().restartNode(
- clusterState.nodes().get(shardToResync.currentNodeId()).getName(),
- new InternalTestCluster.RestartCallback() {
- @Override
- public Settings onNodeStopped(String nodeName) throws Exception {
- assertBusy(
- () -> assertFalse(
- client().admin().indices().prepareStats(indexName).get().getShards()[0].getRetentionLeaseStats()
- .retentionLeases()
- .contains(ReplicationTracker.getPeerRecoveryRetentionLeaseId(shardToResync))
- )
- );
- return super.onNodeStopped(nodeName);
- }
- }
- );
-
- ensureGreen(indexName);
- }
-
public void testRecoverLocallyUpToGlobalCheckpoint() throws Exception {
internalCluster().ensureAtLeastNumDataNodes(2);
List<String> nodes = randomSubsetOf(
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java
index db8f9ea360598..aebb891ae784b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java
@@ -484,11 +484,7 @@ public void testRecoverExistingReplica() throws Exception {
.collect(toList())
);
ensureGreen(indexName);
- if (randomBoolean()) {
- client().admin().indices().prepareFlush(indexName).get();
- } else {
- client().admin().indices().prepareSyncedFlush(indexName).get();
- }
+ client().admin().indices().prepareFlush(indexName).get();
// index more documents while one shard copy is offline
internalCluster().restartNode(dataNodes.get(1), new InternalTestCluster.RestartCallback() {
@Override
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java
index ae99949c62b3e..95a421b126bae 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java
@@ -49,7 +49,6 @@
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.IndexNotFoundException;
-import org.opensearch.index.IndexSettings;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.rest.RestStatus;
import org.opensearch.test.OpenSearchIntegTestCase;
@@ -332,7 +331,7 @@ public void testOpenCloseWithDocs() throws IOException, ExecutionException, Inte
// check the index still contains the records that we indexed
client().admin().indices().prepareOpen("test").execute().get();
ensureGreen();
- SearchResponse searchResponse = client().prepareSearch().setTypes("type").setQuery(QueryBuilders.matchQuery("test", "init")).get();
+ SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")).get();
assertNoFailures(searchResponse);
assertHitCount(searchResponse, docs);
}
@@ -395,9 +394,6 @@ public void testOpenCloseIndexWithBlocks() {
public void testTranslogStats() throws Exception {
final String indexName = "test";
createIndex(indexName, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build());
- boolean softDeletesEnabled = IndexSettings.INDEX_SOFT_DELETES_SETTING.get(
- client().admin().indices().prepareGetSettings(indexName).get().getIndexToSettings().get(indexName)
- );
final int nbDocs = randomIntBetween(0, 50);
int uncommittedOps = 0;
@@ -419,7 +415,7 @@ public void testTranslogStats() throws Exception {
assertThat(stats.getIndex(indexName), notNullValue());
assertThat(
stats.getIndex(indexName).getPrimaries().getTranslog().estimatedNumberOfOperations(),
- equalTo(softDeletesEnabled ? uncommittedTranslogOps : nbDocs)
+ equalTo(uncommittedTranslogOps)
);
assertThat(stats.getIndex(indexName).getPrimaries().getTranslog().getUncommittedOperations(), equalTo(uncommittedTranslogOps));
});
@@ -435,10 +431,7 @@ public void testTranslogStats() throws Exception {
.setTranslog(true)
.get();
assertThat(stats.getIndex(indexName), notNullValue());
- assertThat(
- stats.getIndex(indexName).getPrimaries().getTranslog().estimatedNumberOfOperations(),
- equalTo(softDeletesEnabled ? 0 : nbDocs)
- );
+ assertThat(stats.getIndex(indexName).getPrimaries().getTranslog().estimatedNumberOfOperations(), equalTo(0));
assertThat(stats.getIndex(indexName).getPrimaries().getTranslog().getUncommittedOperations(), equalTo(0));
}
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java
index 4dd6646670bd0..cca01a9ec6dcb 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java
@@ -694,7 +694,7 @@ public void testSimpleStats() throws Exception {
assertThat(stats.getTotal().getRefresh(), notNullValue());
// check get
- GetResponse getResponse = client().prepareGet("test2", "type", "1").execute().actionGet();
+ GetResponse getResponse = client().prepareGet("test2", "1").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
stats = client().admin().indices().prepareStats().execute().actionGet();
@@ -703,7 +703,7 @@ public void testSimpleStats() throws Exception {
assertThat(stats.getTotal().getGet().getMissingCount(), equalTo(0L));
// missing get
- getResponse = client().prepareGet("test2", "type", "2").execute().actionGet();
+ getResponse = client().prepareGet("test2", "2").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(false));
stats = client().admin().indices().prepareStats().execute().actionGet();
@@ -828,7 +828,6 @@ public void testSegmentsStats() {
assertThat(stats.getTotal().getSegments(), notNullValue());
assertThat(stats.getTotal().getSegments().getCount(), equalTo((long) test1.totalNumShards));
- assertThat(stats.getTotal().getSegments().getMemoryInBytes(), greaterThan(0L));
}
public void testAllFlags() throws Exception {
@@ -1080,40 +1079,6 @@ public void testGroupsParam() throws Exception {
}
- public void testTypesParam() throws Exception {
- createIndex("test1");
- createIndex("test2");
-
- ensureGreen();
-
- client().prepareIndex("test1", "bar", Integer.toString(1)).setSource("foo", "bar").execute().actionGet();
- client().prepareIndex("test2", "baz", Integer.toString(1)).setSource("foo", "bar").execute().actionGet();
- refresh();
-
- IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats();
- IndicesStatsResponse stats = builder.execute().actionGet();
-
- assertThat(stats.getTotal().indexing.getTotal().getIndexCount(), greaterThan(0L));
- assertThat(stats.getTotal().indexing.getTypeStats(), is(nullValue()));
-
- stats = builder.setTypes("bar").execute().actionGet();
- assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L));
- assertThat(stats.getTotal().indexing.getTypeStats().containsKey("baz"), is(false));
-
- stats = builder.setTypes("bar", "baz").execute().actionGet();
- assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L));
- assertThat(stats.getTotal().indexing.getTypeStats().get("baz").getIndexCount(), greaterThan(0L));
-
- stats = builder.setTypes("*").execute().actionGet();
- assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L));
- assertThat(stats.getTotal().indexing.getTypeStats().get("baz").getIndexCount(), greaterThan(0L));
-
- stats = builder.setTypes("*r").execute().actionGet();
- assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L));
- assertThat(stats.getTotal().indexing.getTypeStats().containsKey("baz"), is(false));
-
- }
-
private static void set(Flag flag, IndicesStatsRequestBuilder builder, boolean set) {
switch (flag) {
case Docs:
@@ -1248,9 +1213,7 @@ public void testFilterCacheStats() throws Exception {
client().prepareIndex("index", "type", "1").setSource("foo", "bar"),
client().prepareIndex("index", "type", "2").setSource("foo", "baz")
);
- if (IndexSettings.INDEX_SOFT_DELETES_SETTING.get(settings)) {
- persistGlobalCheckpoint("index"); // Need to persist the global checkpoint for the soft-deletes retention MP.
- }
+ persistGlobalCheckpoint("index"); // Need to persist the global checkpoint for the soft-deletes retention MP.
refresh();
ensureGreen();
@@ -1287,22 +1250,20 @@ public void testFilterCacheStats() throws Exception {
// Here we are testing that a fully deleted segment should be dropped and its cached is evicted.
// In order to instruct the merge policy not to keep a fully deleted segment,
// we need to flush and make that commit safe so that the SoftDeletesPolicy can drop everything.
- if (IndexSettings.INDEX_SOFT_DELETES_SETTING.get(settings)) {
- persistGlobalCheckpoint("index");
- assertBusy(() -> {
- for (final ShardStats shardStats : client().admin().indices().prepareStats("index").get().getIndex("index").getShards()) {
- final long maxSeqNo = shardStats.getSeqNoStats().getMaxSeqNo();
- assertTrue(
- shardStats.getRetentionLeaseStats()
- .retentionLeases()
- .leases()
- .stream()
- .allMatch(retentionLease -> retentionLease.retainingSequenceNumber() == maxSeqNo + 1)
- );
- }
- });
- flush("index");
- }
+ persistGlobalCheckpoint("index");
+ assertBusy(() -> {
+ for (final ShardStats shardStats : client().admin().indices().prepareStats("index").get().getIndex("index").getShards()) {
+ final long maxSeqNo = shardStats.getSeqNoStats().getMaxSeqNo();
+ assertTrue(
+ shardStats.getRetentionLeaseStats()
+ .retentionLeases()
+ .leases()
+ .stream()
+ .allMatch(retentionLease -> retentionLease.retainingSequenceNumber() == maxSeqNo + 1)
+ );
+ }
+ });
+ flush("index");
logger.info("--> force merging to a single segment");
ForceMergeResponse forceMergeResponse = client().admin()
.indices()
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java
index f6a8f5fdfee90..c4a4227c0bc9c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java
@@ -839,11 +839,10 @@ public void testStrictAliasParsingInIndicesCreatedViaTemplates() throws Exceptio
.get();
client().prepareIndex("a1", "test", "test").setSource("{}", XContentType.JSON).get();
- BulkResponse response = client().prepareBulk().add(new IndexRequest("a2", "test", "test").source("{}", XContentType.JSON)).get();
+ BulkResponse response = client().prepareBulk().add(new IndexRequest("a2").id("test").source("{}", XContentType.JSON)).get();
assertThat(response.hasFailures(), is(false));
assertThat(response.getItems()[0].isFailed(), equalTo(false));
assertThat(response.getItems()[0].getIndex(), equalTo("a2"));
- assertThat(response.getItems()[0].getType(), equalTo("test"));
assertThat(response.getItems()[0].getId(), equalTo("test"));
assertThat(response.getItems()[0].getVersion(), equalTo(1L));
@@ -857,7 +856,7 @@ public void testStrictAliasParsingInIndicesCreatedViaTemplates() throws Exceptio
// an index that doesn't exist yet will succeed
client().prepareIndex("b1", "test", "test").setSource("{}", XContentType.JSON).get();
- response = client().prepareBulk().add(new IndexRequest("b2", "test", "test").source("{}", XContentType.JSON)).get();
+ response = client().prepareBulk().add(new IndexRequest("b2").id("test").source("{}", XContentType.JSON)).get();
assertThat(response.hasFailures(), is(false));
assertThat(response.getItems()[0].isFailed(), equalTo(false));
assertThat(response.getItems()[0].getId(), equalTo("test"));
diff --git a/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java b/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java
index 4ef8b2ba38e67..6317dd62418f3 100644
--- a/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java
@@ -138,7 +138,7 @@ public void testSimulate() throws Exception {
source.put("foo", "bar");
source.put("fail", false);
source.put("processed", true);
- IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, source);
+ IngestDocument ingestDocument = new IngestDocument("index", "id", null, null, null, source);
assertThat(simulateDocumentBaseResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata()));
assertThat(simulateDocumentBaseResult.getFailure(), nullValue());
@@ -167,7 +167,7 @@ public void testBulkWithIngestFailures() throws Exception {
int numRequests = scaledRandomIntBetween(32, 128);
BulkRequest bulkRequest = new BulkRequest();
for (int i = 0; i < numRequests; i++) {
- IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)).setPipeline("_id");
+ IndexRequest indexRequest = new IndexRequest("index").id(Integer.toString(i)).setPipeline("_id");
indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field", "value", "fail", i % 2 == 0);
bulkRequest.add(indexRequest);
}
@@ -216,10 +216,10 @@ public void testBulkWithUpsert() throws Exception {
client().admin().cluster().putPipeline(putPipelineRequest).get();
BulkRequest bulkRequest = new BulkRequest();
- IndexRequest indexRequest = new IndexRequest("index", "type", "1").setPipeline("_id");
+ IndexRequest indexRequest = new IndexRequest("index").id("1").setPipeline("_id");
indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field1", "val1");
bulkRequest.add(indexRequest);
- UpdateRequest updateRequest = new UpdateRequest("index", "type", "2");
+ UpdateRequest updateRequest = new UpdateRequest("index", "2");
updateRequest.doc("{}", Requests.INDEX_CONTENT_TYPE);
updateRequest.upsert("{\"field1\":\"upserted_val\"}", XContentType.JSON).upsertRequest().setPipeline("_id");
bulkRequest.add(updateRequest);
@@ -227,10 +227,10 @@ public void testBulkWithUpsert() throws Exception {
BulkResponse response = client().bulk(bulkRequest).actionGet();
assertThat(response.getItems().length, equalTo(bulkRequest.requests().size()));
- Map<String, Object> inserted = client().prepareGet("index", "type", "1").get().getSourceAsMap();
+ Map<String, Object> inserted = client().prepareGet("index", "1").get().getSourceAsMap();
assertThat(inserted.get("field1"), equalTo("val1"));
assertThat(inserted.get("processed"), equalTo(true));
- Map