From 189563bad689b2de2e3a8742b791a6f558533088 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Mar 2022 09:36:26 -0700 Subject: [PATCH 01/12] Bump commons-lang3 from 3.4 to 3.12.0 in /plugins/repository-azure (#2455) * Bump commons-lang3 from 3.4 to 3.12.0 in /plugins/repository-azure Bumps commons-lang3 from 3.4 to 3.12.0. --- updated-dependencies: - dependency-name: org.apache.commons:commons-lang3 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-azure/build.gradle | 2 +- plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 | 1 + plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 | 1 - 3 files changed, 2 insertions(+), 2 deletions(-) create mode 100644 plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 1f923b8f36bbd..c531cd390e7ee 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -69,7 +69,7 @@ dependencies { api 'org.codehaus.woodstox:stax2-api:4.2.1' implementation 'com.fasterxml.woodstox:woodstox-core:6.1.1' runtimeOnly 'com.google.guava:guava:31.1-jre' - api 'org.apache.commons:commons-lang3:3.4' + api 'org.apache.commons:commons-lang3:3.12.0' testImplementation project(':test:fixtures:azure-fixture') } diff --git a/plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 b/plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 new file mode 100644 index 0000000000000..9273d8c01aaba --- /dev/null +++ b/plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 @@ -0,0 +1 @@ 
+c6842c86792ff03b9f1d1fe2aab8dc23aa6c6f0e \ No newline at end of file diff --git a/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 b/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 deleted file mode 100644 index fdd7040377b8f..0000000000000 --- a/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5fe28b9518e58819180a43a850fbc0dd24b7c050 \ No newline at end of file From d19081356a2ee658568a3127829ab1ee8772a6fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Mar 2022 12:47:11 -0700 Subject: [PATCH 02/12] Bump woodstox-core from 6.1.1 to 6.2.8 in /plugins/repository-azure (#2456) * Bump woodstox-core from 6.1.1 to 6.2.8 in /plugins/repository-azure Bumps [woodstox-core](https://github.com/FasterXML/woodstox) from 6.1.1 to 6.2.8. - [Release notes](https://github.com/FasterXML/woodstox/releases) - [Commits](https://github.com/FasterXML/woodstox/compare/woodstox-core-6.1.1...woodstox-core-6.2.8) --- updated-dependencies: - dependency-name: com.fasterxml.woodstox:woodstox-core dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-azure/build.gradle | 2 +- plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 | 1 - plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 create mode 100644 plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index c531cd390e7ee..3dc089ef8acb7 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -67,7 +67,7 @@ dependencies { api "com.fasterxml.jackson.dataformat:jackson-dataformat-xml:${versions.jackson}" api "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}" api 'org.codehaus.woodstox:stax2-api:4.2.1' - implementation 'com.fasterxml.woodstox:woodstox-core:6.1.1' + implementation 'com.fasterxml.woodstox:woodstox-core:6.2.8' runtimeOnly 'com.google.guava:guava:31.1-jre' api 'org.apache.commons:commons-lang3:3.12.0' testImplementation project(':test:fixtures:azure-fixture') diff --git a/plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 b/plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 deleted file mode 100644 index f2ad1c80882d3..0000000000000 --- a/plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -989bb31963ed1758b95c7c4381a91592a9a8df61 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 b/plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 new file mode 100644 index 0000000000000..ae65cdebf26de --- /dev/null +++ b/plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 @@ -0,0 +1 @@ 
+670748292899c53b1963730d9eb7f8ab71314e90 \ No newline at end of file From 5c0f9bc499c5c4a744d2f29fb5bd9eab4aabc004 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Mon, 14 Mar 2022 17:11:26 -0400 Subject: [PATCH 03/12] Discrepancy in result from _validate/query API and actual query validity (#2416) * Discrepancy in result from _validate/query API and actual query validity Signed-off-by: Andriy Redko * Moved the validate() check later into the flow to allow range validation to trigger first Signed-off-by: Andriy Redko --- .../validate/SimpleValidateQueryIT.java | 97 +++++++++++++++++++ .../query/TransportValidateQueryAction.java | 4 +- .../org/opensearch/index/IndexService.java | 19 +++- .../index/query/QueryRewriteContext.java | 15 +++ .../index/query/QueryShardContext.java | 53 +++++++++- .../index/query/RangeQueryBuilder.java | 9 +- .../opensearch/indices/IndicesService.java | 16 ++- .../search/DefaultSearchContext.java | 6 +- .../org/opensearch/search/SearchService.java | 26 ++++- .../search/DefaultSearchContextTests.java | 27 ++++-- 10 files changed, 248 insertions(+), 24 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java index 51d0a4395127a..29845b39becf2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java @@ -62,6 +62,7 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.opensearch.index.query.QueryBuilders.queryStringQuery; +import static org.opensearch.index.query.QueryBuilders.rangeQuery; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.allOf; @@ -500,4 +501,100 @@ 
public void testExplainTermsQueryWithLookup() throws Exception { .actionGet(); assertThat(response.isValid(), is(true)); } + + // Issue: https://github.com/opensearch-project/OpenSearch/issues/2036 + public void testValidateDateRangeInQueryString() throws IOException { + assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1))); + + assertAcked( + client().admin() + .indices() + .preparePutMapping("test") + .setSource( + XContentFactory.jsonBuilder() + .startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("name") + .field("type", "keyword") + .endObject() + .startObject("timestamp") + .field("type", "date") + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + + client().prepareIndex("test").setId("1").setSource("name", "username", "timestamp", 200).get(); + refresh(); + + ValidateQueryResponse response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery( + QueryBuilders.boolQuery() + .must(rangeQuery("timestamp").gte(0).lte(100)) + .must(queryStringQuery("username").allowLeadingWildcard(false)) + ) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(true)); + + // Use wildcard and date outside the range + response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery( + QueryBuilders.boolQuery() + .must(rangeQuery("timestamp").gte(0).lte(100)) + .must(queryStringQuery("*erna*").allowLeadingWildcard(false)) + ) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(false)); + + // Use wildcard and date inside the range + response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery( + QueryBuilders.boolQuery() + .must(rangeQuery("timestamp").gte(0).lte(1000)) + .must(queryStringQuery("*erna*").allowLeadingWildcard(false)) + ) + .setRewrite(true) + .get(); + + assertNoFailures(response); 
+ assertThat(response.isValid(), is(false)); + + // Use wildcard and date inside the range (allow leading wildcard) + response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery(QueryBuilders.boolQuery().must(rangeQuery("timestamp").gte(0).lte(1000)).must(queryStringQuery("*erna*"))) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(true)); + + // Use invalid date range + response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery(QueryBuilders.boolQuery().must(rangeQuery("timestamp").gte("aaa").lte(100))) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(false)); + + } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index 1fb293b200e51..1849b41ce707f 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -131,7 +131,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener if (request.query() == null) { rewriteListener.onResponse(request.query()); } else { - Rewriteable.rewriteAndFetch(request.query(), searchService.getRewriteContext(timeProvider), rewriteListener); + Rewriteable.rewriteAndFetch(request.query(), searchService.getValidationRewriteContext(timeProvider), rewriteListener); } } @@ -225,7 +225,7 @@ protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest re request.nowInMillis(), request.filteringAliases() ); - SearchContext searchContext = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT); + SearchContext searchContext = searchService.createValidationContext(shardSearchLocalRequest, 
SearchService.NO_TIMEOUT); try { ParsedQuery parsedQuery = searchContext.getQueryShardContext().toQuery(request.query()); searchContext.parsedQuery(parsedQuery); diff --git a/server/src/main/java/org/opensearch/index/IndexService.java b/server/src/main/java/org/opensearch/index/IndexService.java index 7c1033ecea3ad..1b301e85365ba 100644 --- a/server/src/main/java/org/opensearch/index/IndexService.java +++ b/server/src/main/java/org/opensearch/index/IndexService.java @@ -630,6 +630,22 @@ public IndexSettings getIndexSettings() { * {@link IndexReader}-specific optimizations, such as rewriting containing range queries. */ public QueryShardContext newQueryShardContext(int shardId, IndexSearcher searcher, LongSupplier nowInMillis, String clusterAlias) { + return newQueryShardContext(shardId, searcher, nowInMillis, clusterAlias, false); + } + + /** + * Creates a new QueryShardContext. + * + * Passing a {@code null} {@link IndexSearcher} will return a valid context, however it won't be able to make + * {@link IndexReader}-specific optimizations, such as rewriting containing range queries. 
+ */ + public QueryShardContext newQueryShardContext( + int shardId, + IndexSearcher searcher, + LongSupplier nowInMillis, + String clusterAlias, + boolean validate + ) { final SearchIndexNameMatcher indexNameMatcher = new SearchIndexNameMatcher( index().getName(), clusterAlias, @@ -653,7 +669,8 @@ public QueryShardContext newQueryShardContext(int shardId, IndexSearcher searche clusterAlias, indexNameMatcher, allowExpensiveQueries, - valuesSourceRegistry + valuesSourceRegistry, + validate ); } diff --git a/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java index ad1f02ce0265d..720ee077119d6 100644 --- a/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java @@ -52,6 +52,7 @@ public class QueryRewriteContext { protected final Client client; protected final LongSupplier nowInMillis; private final List>> asyncActions = new ArrayList<>(); + private final boolean validate; public QueryRewriteContext( NamedXContentRegistry xContentRegistry, @@ -59,11 +60,22 @@ public QueryRewriteContext( Client client, LongSupplier nowInMillis ) { + this(xContentRegistry, writeableRegistry, client, nowInMillis, false); + } + + public QueryRewriteContext( + NamedXContentRegistry xContentRegistry, + NamedWriteableRegistry writeableRegistry, + Client client, + LongSupplier nowInMillis, + boolean validate + ) { this.xContentRegistry = xContentRegistry; this.writeableRegistry = writeableRegistry; this.client = client; this.nowInMillis = nowInMillis; + this.validate = validate; } /** @@ -140,4 +152,7 @@ public void onFailure(Exception e) { } } + public boolean validate() { + return validate; + } } diff --git a/server/src/main/java/org/opensearch/index/query/QueryShardContext.java b/server/src/main/java/org/opensearch/index/query/QueryShardContext.java index f67feadde4b41..bfc0490e507db 100644 --- 
a/server/src/main/java/org/opensearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/opensearch/index/query/QueryShardContext.java @@ -132,6 +132,48 @@ public QueryShardContext( Predicate indexNameMatcher, BooleanSupplier allowExpensiveQueries, ValuesSourceRegistry valuesSourceRegistry + ) { + this( + shardId, + indexSettings, + bigArrays, + bitsetFilterCache, + indexFieldDataLookup, + mapperService, + similarityService, + scriptService, + xContentRegistry, + namedWriteableRegistry, + client, + searcher, + nowInMillis, + clusterAlias, + indexNameMatcher, + allowExpensiveQueries, + valuesSourceRegistry, + false + ); + } + + public QueryShardContext( + int shardId, + IndexSettings indexSettings, + BigArrays bigArrays, + BitsetFilterCache bitsetFilterCache, + TriFunction, IndexFieldData> indexFieldDataLookup, + MapperService mapperService, + SimilarityService similarityService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + NamedWriteableRegistry namedWriteableRegistry, + Client client, + IndexSearcher searcher, + LongSupplier nowInMillis, + String clusterAlias, + Predicate indexNameMatcher, + BooleanSupplier allowExpensiveQueries, + ValuesSourceRegistry valuesSourceRegistry, + boolean validate ) { this( shardId, @@ -153,7 +195,8 @@ public QueryShardContext( indexSettings.getIndex().getUUID() ), allowExpensiveQueries, - valuesSourceRegistry + valuesSourceRegistry, + validate ); } @@ -175,7 +218,8 @@ public QueryShardContext(QueryShardContext source) { source.indexNameMatcher, source.fullyQualifiedIndex, source.allowExpensiveQueries, - source.valuesSourceRegistry + source.valuesSourceRegistry, + source.validate() ); } @@ -196,9 +240,10 @@ private QueryShardContext( Predicate indexNameMatcher, Index fullyQualifiedIndex, BooleanSupplier allowExpensiveQueries, - ValuesSourceRegistry valuesSourceRegistry + ValuesSourceRegistry valuesSourceRegistry, + boolean validate ) { - super(xContentRegistry, namedWriteableRegistry, 
client, nowInMillis); + super(xContentRegistry, namedWriteableRegistry, client, nowInMillis, validate); this.shardId = shardId; this.similarityService = similarityService; this.mapperService = mapperService; diff --git a/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java index 1c27946514a3d..80b792d750546 100644 --- a/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java @@ -452,7 +452,7 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC } DateMathParser dateMathParser = getForceDateParser(); - return fieldType.isFieldWithinQuery( + final MappedFieldType.Relation relation = fieldType.isFieldWithinQuery( shardContext.getIndexReader(), from, to, @@ -462,6 +462,13 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC dateMathParser, queryRewriteContext ); + + // For validation, always assume that there is an intersection + if (relation == MappedFieldType.Relation.DISJOINT && shardContext.validate()) { + return MappedFieldType.Relation.INTERSECTS; + } + + return relation; } // Not on the shard, we have no way to know what the relation is. 
diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index 22ab5a9cd9c0b..5caafb0ce60d4 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -1632,7 +1632,21 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set context1.preProcess(false)); @@ -286,7 +291,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); SliceBuilder sliceBuilder = mock(SliceBuilder.class); @@ -323,7 +329,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); ParsedQuery parsedQuery = ParsedQuery.parsedMatchAllQuery(); context3.sliceBuilder(null).parsedQuery(parsedQuery).preProcess(false); @@ -352,7 +359,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); context4.sliceBuilder(new SliceBuilder(1, 2)).parsedQuery(parsedQuery).preProcess(false); Query query1 = context4.query(); @@ -380,7 +388,9 @@ public void testClearQueryCancellationsOnClose() throws IOException { IndexService indexService = mock(IndexService.class); QueryShardContext queryShardContext = mock(QueryShardContext.class); - when(indexService.newQueryShardContext(eq(shardId.id()), any(), any(), nullable(String.class))).thenReturn(queryShardContext); + when(indexService.newQueryShardContext(eq(shardId.id()), any(), any(), nullable(String.class), anyBoolean())).thenReturn( + queryShardContext + ); BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); @@ -429,7 +439,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); 
assertThat(context.searcher().hasCancellations(), is(false)); context.searcher().addQueryCancellation(() -> {}); From 2b68b14629ba8bb5ccb1db3ed3806f837b56df15 Mon Sep 17 00:00:00 2001 From: Suraj Singh <79435743+dreamer-89@users.noreply.github.com> Date: Mon, 14 Mar 2022 19:09:04 -0700 Subject: [PATCH 04/12] [Remove] Type from TermsLookUp (#2459) * [Remove] Type from TermsLookUp Signed-off-by: Suraj Singh * Fix unit test failure Signed-off-by: Suraj Singh --- .../search/150_rewrite_on_coordinator.yml | 4 +- .../search/query/SearchQueryIT.java | 36 ++++------- .../validate/SimpleValidateQueryIT.java | 2 +- .../index/query/TermsQueryBuilder.java | 4 -- .../org/opensearch/indices/TermsLookup.java | 62 ++++--------------- .../index/query/TermsQueryBuilderTests.java | 11 +--- .../opensearch/indices/TermsLookupTests.java | 55 ++-------------- 7 files changed, 32 insertions(+), 142 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml index be34e10ddcd74..77298cb4f61c3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml @@ -39,7 +39,7 @@ search: rest_total_hits_as_int: true index: "search_index" - body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "type" : "_doc", "id": "1", "path": "followers"} } } } + body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "id": "1", "path": "followers"} } } } - do: indices.create: index: lookup_index @@ -64,7 +64,7 @@ search: rest_total_hits_as_int: true index: "search_index" - body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "type" : "_doc", "id": "1", "path": "followers"} } } } + body: { "size" : 0, "query" : { "terms" : { "user" : { "index": 
"lookup_index", "id": "1", "path": "followers"} } } } - match: { _shards.total: 5 } - match: { _shards.successful: 5 } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index db87269c8ceae..c9bb746973226 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -1195,75 +1195,63 @@ public void testTermsLookupFilter() throws Exception { ); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "1", "terms"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))) .get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); // same as above, just on the _id... - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "type", "1", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "1", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); // another search with same parameters... 
- searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "1", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "2", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "2", "terms"))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("2")); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "3", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "3", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "2", "4"); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "4", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "4", "terms"))).get(); assertHitCount(searchResponse, 0L); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "1", "arr.term"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "1", "arr.term"))) .get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "2", "arr.term"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "2", "arr.term"))) .get(); assertHitCount(searchResponse, 1L); 
assertFirstHit(searchResponse, hasId("2")); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "3", "arr.term"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "3", "arr.term"))) .get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "2", "4"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "type", "3", "arr.term"))) + .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "3", "arr.term"))) .get(); assertHitCount(searchResponse, 0L); // index "lookup" type "type" id "missing" document does not exist: ignore the lookup terms searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "missing", "terms"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "missing", "terms"))) .get(); assertHitCount(searchResponse, 0L); // index "lookup3" type "type" has the source disabled: ignore the lookup terms - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup3", "type", "1", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup3", "1", "terms"))).get(); assertHitCount(searchResponse, 0L); } diff --git a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java index 29845b39becf2..30ab282bf3d44 100644 --- a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java @@ -491,7 +491,7 @@ public void testExplainTermsQueryWithLookup() throws Exception { client().prepareIndex("twitter").setId("1").setSource("followers", new int[] { 1, 2, 3 }).get(); 
refresh(); - TermsQueryBuilder termsLookupQuery = QueryBuilders.termsLookupQuery("user", new TermsLookup("twitter", "_doc", "1", "followers")); + TermsQueryBuilder termsLookupQuery = QueryBuilders.termsLookupQuery("user", new TermsLookup("twitter", "1", "followers")); ValidateQueryResponse response = client().admin() .indices() .prepareValidateQuery("twitter") diff --git a/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java index e797730ac0dff..ac29cb2cf5201 100644 --- a/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java @@ -225,10 +225,6 @@ public TermsLookup termsLookup() { return this.termsLookup; } - public boolean isTypeless() { - return termsLookup == null || termsLookup.type() == null; - } - private static final Set> INTEGER_TYPES = new HashSet<>( Arrays.asList(Byte.class, Short.class, Integer.class, Long.class) ); diff --git a/server/src/main/java/org/opensearch/indices/TermsLookup.java b/server/src/main/java/org/opensearch/indices/TermsLookup.java index 1aa16ad5cd72c..bf6c024fa130e 100644 --- a/server/src/main/java/org/opensearch/indices/TermsLookup.java +++ b/server/src/main/java/org/opensearch/indices/TermsLookup.java @@ -32,8 +32,7 @@ package org.opensearch.indices; -import org.opensearch.LegacyESVersion; -import org.opensearch.common.Nullable; +import org.opensearch.Version; import org.opensearch.common.ParseField; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -42,13 +41,13 @@ import org.opensearch.common.xcontent.ToXContentFragment; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.TermsQueryBuilder; import java.io.IOException; import java.util.Objects; import 
static org.opensearch.common.xcontent.ConstructingObjectParser.constructorArg; -import static org.opensearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Encapsulates the parameters needed to fetch terms. @@ -56,20 +55,11 @@ public class TermsLookup implements Writeable, ToXContentFragment { private final String index; - private @Nullable String type; private final String id; private final String path; private String routing; public TermsLookup(String index, String id, String path) { - this(index, null, id, path); - } - - /** - * @deprecated Types are in the process of being removed, use {@link TermsLookup(String, String, String)} instead. - */ - @Deprecated - public TermsLookup(String index, String type, String id, String path) { if (id == null) { throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the id."); } @@ -80,7 +70,6 @@ public TermsLookup(String index, String type, String id, String path) { throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the index."); } this.index = index; - this.type = type; this.id = id; this.path = path; } @@ -89,11 +78,8 @@ public TermsLookup(String index, String type, String id, String path) { * Read from a stream. */ public TermsLookup(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) { - type = in.readOptionalString(); - } else { - // Before 7.0, the type parameter was always non-null and serialized as a (non-optional) string. 
- type = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readOptionalString(); } id = in.readString(); path = in.readString(); @@ -103,16 +89,8 @@ public TermsLookup(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) { - out.writeOptionalString(type); - } else { - if (type == null) { - throw new IllegalArgumentException( - "Typeless [terms] lookup queries are not supported if any " + "node is running a version before 7.0." - ); - - } - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeOptionalString(MapperService.SINGLE_MAPPING_NAME); } out.writeString(id); out.writeString(path); @@ -124,14 +102,6 @@ public String index() { return index; } - /** - * @deprecated Types are in the process of being removed. - */ - @Deprecated - public String type() { - return type; - } - public String id() { return id; } @@ -151,14 +121,12 @@ public TermsLookup routing(String routing) { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("terms_lookup", args -> { String index = (String) args[0]; - String type = (String) args[1]; - String id = (String) args[2]; - String path = (String) args[3]; - return new TermsLookup(index, type, id, path); + String id = (String) args[1]; + String path = (String) args[2]; + return new TermsLookup(index, id, path); }); static { PARSER.declareString(constructorArg(), new ParseField("index")); - PARSER.declareString(optionalConstructorArg(), new ParseField("type").withAllDeprecated()); PARSER.declareString(constructorArg(), new ParseField("id")); PARSER.declareString(constructorArg(), new ParseField("path")); PARSER.declareString(TermsLookup::routing, new ParseField("routing")); @@ -170,19 +138,12 @@ public static TermsLookup parseTermsLookup(XContentParser parser) throws IOExcep @Override public String toString() { - if (type == null) { - return index 
+ "/" + id + "/" + path; - } else { - return index + "/" + type + "/" + id + "/" + path; - } + return index + "/" + id + "/" + path; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field("index", index); - if (type != null) { - builder.field("type", type); - } builder.field("id", id); builder.field("path", path); if (routing != null) { @@ -193,7 +154,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public int hashCode() { - return Objects.hash(index, type, id, path, routing); + return Objects.hash(index, id, path, routing); } @Override @@ -206,7 +167,6 @@ public boolean equals(Object obj) { } TermsLookup other = (TermsLookup) obj; return Objects.equals(index, other.index) - && Objects.equals(type, other.type) && Objects.equals(id, other.id) && Objects.equals(path, other.path) && Objects.equals(routing, other.routing); diff --git a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java index e37b4f1a1c39f..ea93d7a65b951 100644 --- a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java @@ -119,9 +119,7 @@ protected TermsQueryBuilder doCreateTestQueryBuilder() { private TermsLookup randomTermsLookup() { // Randomly choose between a typeless terms lookup and one with an explicit type to make sure we are - TermsLookup lookup = maybeIncludeType && randomBoolean() - ? new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath) - : new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath); + TermsLookup lookup = new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath); // testing both cases. lookup.routing(randomBoolean() ? 
randomAlphaOfLength(10) : null); return lookup; @@ -379,13 +377,6 @@ protected QueryBuilder parseQuery(XContentParser parser) throws IOException { try { QueryBuilder query = super.parseQuery(parser); assertThat(query, CoreMatchers.instanceOf(TermsQueryBuilder.class)); - - TermsQueryBuilder termsQuery = (TermsQueryBuilder) query; - String deprecationWarning = "Deprecated field [type] used, this field is unused and will be removed entirely"; - if (termsQuery.isTypeless() == false && !assertedWarnings.contains(deprecationWarning)) { - assertWarnings(deprecationWarning); - assertedWarnings.add(deprecationWarning); - } return query; } finally { diff --git a/server/src/test/java/org/opensearch/indices/TermsLookupTests.java b/server/src/test/java/org/opensearch/indices/TermsLookupTests.java index fb1462b500ea9..661995a22c507 100644 --- a/server/src/test/java/org/opensearch/indices/TermsLookupTests.java +++ b/server/src/test/java/org/opensearch/indices/TermsLookupTests.java @@ -45,42 +45,36 @@ public class TermsLookupTests extends OpenSearchTestCase { public void testTermsLookup() { String index = randomAlphaOfLengthBetween(1, 10); - String type = randomAlphaOfLengthBetween(1, 10); String id = randomAlphaOfLengthBetween(1, 10); String path = randomAlphaOfLengthBetween(1, 10); String routing = randomAlphaOfLengthBetween(1, 10); - TermsLookup termsLookup = new TermsLookup(index, type, id, path); + TermsLookup termsLookup = new TermsLookup(index, id, path); termsLookup.routing(routing); assertEquals(index, termsLookup.index()); - assertEquals(type, termsLookup.type()); assertEquals(id, termsLookup.id()); assertEquals(path, termsLookup.path()); assertEquals(routing, termsLookup.routing()); } public void testIllegalArguments() { - String type = randomAlphaOfLength(5); String id = randomAlphaOfLength(5); String path = randomAlphaOfLength(5); String index = randomAlphaOfLength(5); - switch (randomIntBetween(0, 3)) { + switch (randomIntBetween(0, 2)) { case 0: - type = null; - 
break; - case 1: id = null; break; - case 2: + case 1: path = null; break; - case 3: + case 2: index = null; break; default: fail("unknown case"); } try { - new TermsLookup(index, type, id, path); + new TermsLookup(index, id, path); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("[terms] query lookup element requires specifying")); } @@ -99,35 +93,6 @@ public void testSerialization() throws IOException { } } - public void testSerializationWithTypes() throws IOException { - TermsLookup termsLookup = randomTermsLookupWithTypes(); - try (BytesStreamOutput output = new BytesStreamOutput()) { - termsLookup.writeTo(output); - try (StreamInput in = output.bytes().streamInput()) { - TermsLookup deserializedLookup = new TermsLookup(in); - assertEquals(deserializedLookup, termsLookup); - assertEquals(deserializedLookup.hashCode(), termsLookup.hashCode()); - assertNotSame(deserializedLookup, termsLookup); - } - } - } - - public void testXContentParsingWithType() throws IOException { - XContentParser parser = createParser( - JsonXContent.jsonXContent, - "{ \"index\" : \"index\", \"id\" : \"id\", \"type\" : \"type\", \"path\" : \"path\", \"routing\" : \"routing\" }" - ); - - TermsLookup tl = TermsLookup.parseTermsLookup(parser); - assertEquals("index", tl.index()); - assertEquals("type", tl.type()); - assertEquals("id", tl.id()); - assertEquals("path", tl.path()); - assertEquals("routing", tl.routing()); - - assertWarnings("Deprecated field [type] used, this field is unused and will be removed entirely"); - } - public void testXContentParsing() throws IOException { XContentParser parser = createParser( JsonXContent.jsonXContent, @@ -136,7 +101,6 @@ public void testXContentParsing() throws IOException { TermsLookup tl = TermsLookup.parseTermsLookup(parser); assertEquals("index", tl.index()); - assertNull(tl.type()); assertEquals("id", tl.id()); assertEquals("path", tl.path()); assertEquals("routing", tl.routing()); @@ -147,13 +111,4 @@ public 
static TermsLookup randomTermsLookup() { randomBoolean() ? randomAlphaOfLength(10) : null ); } - - public static TermsLookup randomTermsLookupWithTypes() { - return new TermsLookup( - randomAlphaOfLength(10), - randomAlphaOfLength(10), - randomAlphaOfLength(10), - randomAlphaOfLength(10).replace('.', '_') - ).routing(randomBoolean() ? randomAlphaOfLength(10) : null); - } } From 02d000c514c6bab875d2985a2a77455a81576b41 Mon Sep 17 00:00:00 2001 From: Suraj Singh <79435743+dreamer-89@users.noreply.github.com> Date: Mon, 14 Mar 2022 20:46:23 -0700 Subject: [PATCH 05/12] [Remove] Type query (#2448) Signed-off-by: Suraj Singh --- .../index/mapper/DocumentMapper.java | 8 - .../index/query/TypeQueryBuilder.java | 158 ------------------ .../org/opensearch/search/SearchModule.java | 2 - .../index/query/TypeQueryBuilderTests.java | 90 ---------- .../opensearch/search/SearchModuleTests.java | 1 - 5 files changed, 259 deletions(-) delete mode 100644 server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java delete mode 100644 server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java index 37e740ec33321..0ee0a3cb9a180 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java @@ -208,14 +208,6 @@ public T metadataMapper(Class type) { return mapping.metadataMapper(type); } - public IndexFieldMapper indexMapper() { - return metadataMapper(IndexFieldMapper.class); - } - - public TypeFieldMapper typeMapper() { - return metadataMapper(TypeFieldMapper.class); - } - public SourceFieldMapper sourceMapper() { return metadataMapper(SourceFieldMapper.class); } diff --git a/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java 
deleted file mode 100644 index d1ffcb394ec06..0000000000000 --- a/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.opensearch.common.ParseField; -import org.opensearch.common.ParsingException; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.logging.DeprecationLogger; -import org.opensearch.common.lucene.search.Queries; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.index.mapper.DocumentMapper; - -import java.io.IOException; -import java.util.Objects; - -public class TypeQueryBuilder extends AbstractQueryBuilder { - public static final String NAME = "type"; - - private static final ParseField VALUE_FIELD = new ParseField("value"); - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TypeQueryBuilder.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Type queries are deprecated, " - + "prefer to filter on a field instead."; - - private final String type; - - public TypeQueryBuilder(String type) { - if (type == null) { - throw new IllegalArgumentException("[type] cannot be null"); - } - this.type = type; - } - - /** - * Read from a stream. 
- */ - public TypeQueryBuilder(StreamInput in) throws IOException { - super(in); - type = in.readString(); - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - out.writeString(type); - } - - public String type() { - return type; - } - - @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME); - builder.field(VALUE_FIELD.getPreferredName(), type); - printBoostAndQueryName(builder); - builder.endObject(); - } - - public static TypeQueryBuilder fromXContent(XContentParser parser) throws IOException { - String type = null; - String queryName = null; - float boost = AbstractQueryBuilder.DEFAULT_BOOST; - String currentFieldName = null; - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - queryName = parser.text(); - } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - boost = parser.floatValue(); - } else if (VALUE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - type = parser.text(); - } else { - throw new ParsingException( - parser.getTokenLocation(), - "[" + TypeQueryBuilder.NAME + "] filter doesn't support [" + currentFieldName + "]" - ); - } - } else { - throw new ParsingException( - parser.getTokenLocation(), - "[" + TypeQueryBuilder.NAME + "] filter doesn't support [" + currentFieldName + "]" - ); - } - } - - if (type == null) { - throw new ParsingException( - parser.getTokenLocation(), - "[" + TypeQueryBuilder.NAME + "] filter needs to be provided with a value for the type" - ); - } - return new TypeQueryBuilder(type).boost(boost).queryName(queryName); - } - - @Override - public String getWriteableName() { - return NAME; 
- } - - @Override - protected Query doToQuery(QueryShardContext context) throws IOException { - deprecationLogger.deprecate("type_query", TYPES_DEPRECATION_MESSAGE); - // LUCENE 4 UPGRADE document mapper should use bytesref as well? - DocumentMapper documentMapper = context.getMapperService().documentMapper(); - if (documentMapper == null) { - // no type means no documents - return new MatchNoDocsQuery(); - } else { - return Queries.newNonNestedFilter(context.indexVersionCreated()); - } - } - - @Override - protected int doHashCode() { - return Objects.hash(type); - } - - @Override - protected boolean doEquals(TypeQueryBuilder other) { - return Objects.equals(type, other.type); - } -} diff --git a/server/src/main/java/org/opensearch/search/SearchModule.java b/server/src/main/java/org/opensearch/search/SearchModule.java index cdc2509bbcb00..c052f7f89e14e 100644 --- a/server/src/main/java/org/opensearch/search/SearchModule.java +++ b/server/src/main/java/org/opensearch/search/SearchModule.java @@ -89,7 +89,6 @@ import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.index.query.TermsSetQueryBuilder; -import org.opensearch.index.query.TypeQueryBuilder; import org.opensearch.index.query.WildcardQueryBuilder; import org.opensearch.index.query.WrapperQueryBuilder; import org.opensearch.index.query.functionscore.ExponentialDecayFunctionBuilder; @@ -1183,7 +1182,6 @@ private void registerQueryParsers(List plugins) { registerQuery( new QuerySpec<>(SimpleQueryStringBuilder.NAME, SimpleQueryStringBuilder::new, SimpleQueryStringBuilder::fromXContent) ); - registerQuery(new QuerySpec<>(TypeQueryBuilder.NAME, TypeQueryBuilder::new, TypeQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(ScriptQueryBuilder.NAME, ScriptQueryBuilder::new, ScriptQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(GeoDistanceQueryBuilder.NAME, GeoDistanceQueryBuilder::new, GeoDistanceQueryBuilder::fromXContent)); 
registerQuery( diff --git a/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java deleted file mode 100644 index bf373ac180f04..0000000000000 --- a/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.opensearch.common.lucene.search.Queries; -import org.opensearch.test.AbstractQueryTestCase; - -import java.io.IOException; - -import static org.hamcrest.Matchers.equalTo; - -public class TypeQueryBuilderTests extends AbstractQueryTestCase { - - @Override - protected TypeQueryBuilder doCreateTestQueryBuilder() { - return new TypeQueryBuilder("_doc"); - } - - @Override - protected void doAssertLuceneQuery(TypeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { - if (createShardContext().getMapperService().documentMapper() == null) { - assertEquals(new MatchNoDocsQuery(), query); - } else { - assertThat(query, equalTo(Queries.newNonNestedFilter(context.indexVersionCreated()))); - } - } - - public void testIllegalArgument() { - expectThrows(IllegalArgumentException.class, () -> new TypeQueryBuilder((String) null)); - } - - public void testFromJson() throws IOException { - String json = "{\n" + " \"type\" : {\n" + " \"value\" : \"my_type\",\n" + " \"boost\" : 1.0\n" + " }\n" + "}"; - - TypeQueryBuilder parsed = (TypeQueryBuilder) parseQuery(json); - checkGeneratedJson(json, parsed); - - assertEquals(json, "my_type", parsed.type()); - } - - @Override - public void testToQuery() throws IOException { - super.testToQuery(); - assertWarnings(TypeQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } - - @Override - public void testMustRewrite() throws IOException { - super.testMustRewrite(); - assertWarnings(TypeQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } - - @Override - public void testCacheability() throws IOException { - super.testCacheability(); - assertWarnings(TypeQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/opensearch/search/SearchModuleTests.java b/server/src/test/java/org/opensearch/search/SearchModuleTests.java index 19b61275b8f62..05d4153949f9a 100644 
--- a/server/src/test/java/org/opensearch/search/SearchModuleTests.java +++ b/server/src/test/java/org/opensearch/search/SearchModuleTests.java @@ -459,7 +459,6 @@ public List> getRescorers() { "term", "terms", "terms_set", - "type", "wildcard", "wrapper", "distance_feature" }; From 7df40ee1b098014cf3ef817acae303263ddc917f Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 15 Mar 2022 01:05:58 -0500 Subject: [PATCH 06/12] [Remove] type from TaskResults index and IndexMetadata.getMappings (#2469) Removes types from the TaskResults internal index along with the getMappings method from IndexMetadata. This is needed to further remove types from CreateIndexRequest. Signed-off-by: Nicholas Walter Knize --- .../admin/cluster/node/tasks/TasksIT.java | 5 ++ .../gateway/GatewayIndexStateIT.java | 19 ++---- .../opensearch/gateway/MetadataNodesIT.java | 10 +-- .../opensearch/action/index/IndexRequest.java | 2 +- .../cluster/metadata/IndexMetadata.java | 34 +++++----- .../cluster/metadata/MappingMetadata.java | 62 +++++-------------- .../opensearch/cluster/metadata/Metadata.java | 2 +- .../index/mapper/MapperService.java | 5 +- .../opensearch/index/shard/StoreRecovery.java | 5 +- .../opensearch/tasks/TaskResultsService.java | 8 +-- .../opensearch/tasks/task-index-mapping.json | 2 +- .../metadata/MetadataMappingServiceTests.java | 2 +- 12 files changed, 50 insertions(+), 106 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java index e1346492999be..fbac2f7dbff6e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java @@ -840,6 +840,11 @@ public void testTaskStoringSuccessfulResult() throws Exception { GetTaskResponse getResponse = expectFinishedTask(taskId); 
assertEquals(result, getResponse.getTask().getResponseAsMap()); assertNull(getResponse.getTask().getError()); + + // run it again to check that the tasks index has been successfully created and can be re-used + client().execute(TestTaskPlugin.TestTaskAction.INSTANCE, request).get(); + events = findEvents(TestTaskPlugin.TestTaskAction.NAME, Tuple::v1); + assertEquals(2, events.size()); } public void testTaskStoringFailureResult() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java index 6fe22e2a8fde4..2138e24cc9b4c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java @@ -60,7 +60,6 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.mapper.MapperParsingException; -import org.opensearch.index.mapper.MapperService; import org.opensearch.indices.IndexClosedException; import org.opensearch.indices.ShardLimitValidator; import org.opensearch.test.OpenSearchIntegTestCase; @@ -123,9 +122,8 @@ public void testMappingMetadataParsed() throws Exception { .getState() .metadata() .index("test") - .getMappings() - .get(MapperService.SINGLE_MAPPING_NAME); - assertThat(mappingMd.routing().required(), equalTo(true)); + .mapping(); + assertThat(mappingMd.routingRequired(), equalTo(true)); logger.info("--> restarting nodes..."); internalCluster().fullRestart(); @@ -134,17 +132,8 @@ public void testMappingMetadataParsed() throws Exception { ensureYellow(); logger.info("--> verify meta _routing required exists"); - mappingMd = client().admin() - .cluster() - .prepareState() - .execute() - .actionGet() - .getState() - .metadata() - .index("test") - .getMappings() - .get(MapperService.SINGLE_MAPPING_NAME); - 
assertThat(mappingMd.routing().required(), equalTo(true)); + mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metadata().index("test").mapping(); + assertThat(mappingMd.routingRequired(), equalTo(true)); } public void testSimpleOpenClose() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java index 2731eb9a290d6..c9807aa24e259 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java @@ -153,11 +153,7 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { // make sure it was also written on red node although index is closed ImmutableOpenMap indicesMetadata = getIndicesMetadataOnNode(dataNode); - assertNotNull( - ((Map) (indicesMetadata.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get( - "integer_field" - ) - ); + assertNotNull(((Map) (indicesMetadata.get(index).mapping().getSourceAsMap().get("properties"))).get("integer_field")); assertThat(indicesMetadata.get(index).getState(), equalTo(IndexMetadata.State.CLOSE)); /* Try the same and see if this also works if node was just restarted. 
@@ -190,9 +186,7 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { // make sure it was also written on red node although index is closed indicesMetadata = getIndicesMetadataOnNode(dataNode); - assertNotNull( - ((Map) (indicesMetadata.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("float_field") - ); + assertNotNull(((Map) (indicesMetadata.get(index).mapping().getSourceAsMap().get("properties"))).get("float_field")); assertThat(indicesMetadata.get(index).getState(), equalTo(IndexMetadata.State.CLOSE)); // finally check that meta data is also written of index opened again diff --git a/server/src/main/java/org/opensearch/action/index/IndexRequest.java b/server/src/main/java/org/opensearch/action/index/IndexRequest.java index ed77774bc01d3..7bf6b876fa652 100644 --- a/server/src/main/java/org/opensearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/opensearch/action/index/IndexRequest.java @@ -615,7 +615,7 @@ public VersionType versionType() { public void process(Version indexCreatedVersion, @Nullable MappingMetadata mappingMd, String concreteIndex) { if (mappingMd != null) { // might as well check for routing here - if (mappingMd.routing().required() && routing == null) { + if (mappingMd.routingRequired() && routing == null) { throw new RoutingMissingException(concreteIndex, id); } } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java index a7f351a918ae5..6510c57060fe0 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java @@ -660,17 +660,6 @@ public ImmutableOpenMap getAliases() { return this.aliases; } - /** - * Return an object that maps each type to the associated mappings. - * The return value is never {@code null} but may be empty if the index - * has no mappings. 
- * @deprecated Use {@link #mapping()} instead now that indices have a single type - */ - @Deprecated - public ImmutableOpenMap getMappings() { - return mappings; - } - /** * Return the concrete mapping for this index or {@code null} if this index has no mappings at all. */ @@ -1175,7 +1164,10 @@ public Builder putMapping(String source) throws IOException { } public Builder putMapping(MappingMetadata mappingMd) { - mappings.put(mappingMd.type(), mappingMd); + mappings.clear(); + if (mappingMd != null) { + mappings.put(mappingMd.type(), mappingMd); + } return this; } @@ -1464,23 +1456,25 @@ public static void toXContent(IndexMetadata indexMetadata, XContentBuilder build if (context != Metadata.XContentContext.API) { builder.startArray(KEY_MAPPINGS); - for (ObjectObjectCursor cursor : indexMetadata.getMappings()) { + MappingMetadata mmd = indexMetadata.mapping(); + if (mmd != null) { if (binary) { - builder.value(cursor.value.source().compressed()); + builder.value(mmd.source().compressed()); } else { - builder.map(XContentHelper.convertToMap(cursor.value.source().uncompressed(), true).v2()); + builder.map(XContentHelper.convertToMap(mmd.source().uncompressed(), true).v2()); } } builder.endArray(); } else { builder.startObject(KEY_MAPPINGS); - for (ObjectObjectCursor cursor : indexMetadata.getMappings()) { - Map mapping = XContentHelper.convertToMap(cursor.value.source().uncompressed(), false).v2(); - if (mapping.size() == 1 && mapping.containsKey(cursor.key)) { + MappingMetadata mmd = indexMetadata.mapping(); + if (mmd != null) { + Map mapping = XContentHelper.convertToMap(mmd.source().uncompressed(), false).v2(); + if (mapping.size() == 1 && mapping.containsKey(mmd.type())) { // the type name is the root value, reduce it - mapping = (Map) mapping.get(cursor.key); + mapping = (Map) mapping.get(mmd.type()); } - builder.field(cursor.key); + builder.field(mmd.type()); builder.map(mapping); } builder.endObject(); diff --git 
a/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java index 66bca027d7cc4..620542f8f1bde 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java @@ -50,6 +50,7 @@ import java.io.UncheckedIOException; import java.util.Collections; import java.util.Map; +import java.util.Objects; import static org.opensearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; @@ -59,46 +60,16 @@ public class MappingMetadata extends AbstractDiffable { public static final MappingMetadata EMPTY_MAPPINGS = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, Collections.emptyMap()); - public static class Routing { - - public static final Routing EMPTY = new Routing(false); - - private final boolean required; - - public Routing(boolean required) { - this.required = required; - } - - public boolean required() { - return required; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Routing routing = (Routing) o; - - return required == routing.required; - } - - @Override - public int hashCode() { - return getClass().hashCode() + (required ? 
1 : 0); - } - } - private final String type; private final CompressedXContent source; - private final Routing routing; + private final boolean routingRequired; public MappingMetadata(DocumentMapper docMapper) { this.type = docMapper.type(); this.source = docMapper.mappingSource(); - this.routing = new Routing(docMapper.routingFieldMapper().required()); + this.routingRequired = docMapper.routingFieldMapper().required(); } @SuppressWarnings("unchecked") @@ -109,7 +80,7 @@ public MappingMetadata(CompressedXContent mapping) { throw new IllegalStateException("Can't derive type from mapping, no root type: " + mapping.string()); } this.type = mappingMap.keySet().iterator().next(); - this.routing = initRouting((Map) mappingMap.get(this.type)); + this.routingRequired = isRoutingRequired((Map) mappingMap.get(this.type)); } @SuppressWarnings("unchecked") @@ -125,13 +96,13 @@ public MappingMetadata(String type, Map mapping) { if (mapping.size() == 1 && mapping.containsKey(type)) { withoutType = (Map) mapping.get(type); } - this.routing = initRouting(withoutType); + this.routingRequired = isRoutingRequired(withoutType); } @SuppressWarnings("unchecked") - private Routing initRouting(Map withoutType) { + private boolean isRoutingRequired(Map withoutType) { + boolean required = false; if (withoutType.containsKey("_routing")) { - boolean required = false; Map routingNode = (Map) withoutType.get("_routing"); for (Map.Entry entry : routingNode.entrySet()) { String fieldName = entry.getKey(); @@ -147,10 +118,8 @@ private Routing initRouting(Map withoutType) { } } } - return new Routing(required); - } else { - return Routing.EMPTY; } + return required; } public String type() { @@ -180,8 +149,8 @@ public Map getSourceAsMap() throws OpenSearchParseException { return sourceAsMap(); } - public Routing routing() { - return this.routing; + public boolean routingRequired() { + return this.routingRequired; } @Override @@ -189,7 +158,7 @@ public void writeTo(StreamOutput out) throws IOException 
{ out.writeString(type()); source().writeTo(out); // routing - out.writeBoolean(routing().required()); + out.writeBoolean(routingRequired); if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { out.writeBoolean(false); // hasParentField } @@ -202,7 +171,7 @@ public boolean equals(Object o) { MappingMetadata that = (MappingMetadata) o; - if (!routing.equals(that.routing)) return false; + if (!Objects.equals(this.routingRequired, that.routingRequired)) return false; if (!source.equals(that.source)) return false; if (!type.equals(that.type)) return false; @@ -211,17 +180,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - int result = type.hashCode(); - result = 31 * result + source.hashCode(); - result = 31 * result + routing.hashCode(); - return result; + return Objects.hash(type, source, routingRequired); } public MappingMetadata(StreamInput in) throws IOException { type = in.readString(); source = CompressedXContent.readCompressedString(in); // routing - routing = new Routing(in.readBoolean()); + routingRequired = in.readBoolean(); if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { in.readBoolean(); // hasParentField } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java b/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java index b3503f64c53f3..6e9c30877f9c2 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java @@ -880,7 +880,7 @@ public boolean routingRequired(String concreteIndex) { if (indexMetadata != null) { MappingMetadata mappingMetadata = indexMetadata.mapping(); if (mappingMetadata != null) { - return mappingMetadata.routing().required(); + return mappingMetadata.routingRequired(); } } return false; diff --git a/server/src/main/java/org/opensearch/index/mapper/MapperService.java b/server/src/main/java/org/opensearch/index/mapper/MapperService.java index 1d4e49a6e6fee..a92647929ff08 
100644 --- a/server/src/main/java/org/opensearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/opensearch/index/mapper/MapperService.java @@ -32,7 +32,6 @@ package org.opensearch.index.mapper; -import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; @@ -416,8 +415,8 @@ public DocumentMapper merge(String type, CompressedXContent mappingSource, Merge private synchronized Map internalMerge(IndexMetadata indexMetadata, MergeReason reason) { assert reason != MergeReason.MAPPING_UPDATE_PREFLIGHT; Map map = new LinkedHashMap<>(); - for (ObjectCursor cursor : indexMetadata.getMappings().values()) { - MappingMetadata mappingMetadata = cursor.value; + MappingMetadata mappingMetadata = indexMetadata.mapping(); + if (mappingMetadata != null) { map.put(mappingMetadata.type(), mappingMetadata.source()); } return internalMerge(map, reason); diff --git a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java index 6cf6ad645ca00..20bb6e7060ca3 100644 --- a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java @@ -32,7 +32,6 @@ package org.opensearch.index.shard; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; @@ -132,8 +131,8 @@ void recoverFromLocalShards( throw new IllegalArgumentException("can't add shards from more than one index"); } IndexMetadata sourceMetadata = shards.get(0).getIndexMetadata(); - for (ObjectObjectCursor mapping : sourceMetadata.getMappings()) { - mappingUpdateConsumer.accept(mapping.value); + if (sourceMetadata.mapping() != null) { + 
mappingUpdateConsumer.accept(sourceMetadata.mapping()); } indexShard.mapperService().merge(sourceMetadata, MapperService.MergeReason.MAPPING_RECOVERY); // now that the mapping is merged we can validate the index sort configuration. diff --git a/server/src/main/java/org/opensearch/tasks/TaskResultsService.java b/server/src/main/java/org/opensearch/tasks/TaskResultsService.java index 60de452c3149e..e22793e057c6a 100644 --- a/server/src/main/java/org/opensearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/opensearch/tasks/TaskResultsService.java @@ -80,13 +80,11 @@ public class TaskResultsService { public static final String TASK_INDEX = ".tasks"; - public static final String TASK_TYPE = "task"; - public static final String TASK_RESULT_INDEX_MAPPING_FILE = "task-index-mapping.json"; public static final String TASK_RESULT_MAPPING_VERSION_META_FIELD = "version"; - public static final int TASK_RESULT_MAPPING_VERSION = 3; + public static final int TASK_RESULT_MAPPING_VERSION = 3; // must match version in task-index-mapping.json /** * The backoff policy to use when saving a task result fails. 
The total wait @@ -115,7 +113,7 @@ public void storeResult(TaskResult taskResult, ActionListener listener) { CreateIndexRequest createIndexRequest = new CreateIndexRequest(); createIndexRequest.settings(taskResultIndexSettings()); createIndexRequest.index(TASK_INDEX); - createIndexRequest.mapping(TASK_TYPE, taskResultIndexMapping(), XContentType.JSON); + createIndexRequest.mapping(taskResultIndexMapping()); createIndexRequest.cause("auto(task api)"); client.admin().indices().create(createIndexRequest, new ActionListener() { @@ -155,7 +153,7 @@ public void onFailure(Exception e) { } private int getTaskResultMappingVersion(IndexMetadata metadata) { - MappingMetadata mappingMetadata = metadata.getMappings().get(TASK_TYPE); + MappingMetadata mappingMetadata = metadata.mapping(); if (mappingMetadata == null) { return 0; } diff --git a/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json b/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json index 76b07bf3570f2..54e9d39902f03 100644 --- a/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json +++ b/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json @@ -1,5 +1,5 @@ { - "task" : { + "_doc" : { "_meta": { "version": 3 }, diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java index a87ec461e5dc8..94bf162303127 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java @@ -79,7 +79,7 @@ public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Ex // the task really was a mapping update assertThat( indexService.mapperService().documentMapper().mappingSource(), - 
not(equalTo(result.resultingState.metadata().index("test").getMappings().get(MapperService.SINGLE_MAPPING_NAME).source())) + not(equalTo(result.resultingState.metadata().index("test").mapping().source())) ); // since we never committed the cluster state update, the in-memory state is unchanged assertThat(indexService.mapperService().documentMapper().mappingSource(), equalTo(currentMapping)); From b619a050bf1048d3edc7b80dd05801a89698ccf1 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 15 Mar 2022 10:31:31 -0500 Subject: [PATCH 07/12] [Remove] types based addMapping method from CreateIndexRequest and Builder (#2460) Removes the obsolete types based .addMapping method from CreateIndexRequest and CreateIndexRequestBuilder. Tests are refactored to use the new source only setMapping method. Signed-off-by: Nicholas Walter Knize --- .../admin/cluster/stats/ClusterStatsIT.java | 14 ++----- .../action/admin/indices/get/GetIndexIT.java | 8 +--- .../cluster/SpecificMasterNodesIT.java | 9 ++-- .../gateway/GatewayIndexStateIT.java | 17 +++----- .../gateway/RecoveryFromGatewayIT.java | 9 +--- .../java/org/opensearch/get/GetActionIT.java | 4 +- .../mapper/CopyToMapperIntegrationIT.java | 5 +-- .../mapping/ConcurrentDynamicTemplateIT.java | 11 ++--- .../mapping/UpdateMappingIntegrationIT.java | 12 ++---- .../RandomExceptionCircuitBreakerIT.java | 5 +-- .../indices/state/OpenCloseIndexIT.java | 5 +-- .../indices/stats/IndexStatsIT.java | 6 +-- .../routing/PartitionedRoutingIT.java | 14 +------ .../aggregations/bucket/DateHistogramIT.java | 5 +-- .../bucket/TermsDocCountErrorIT.java | 9 +--- .../basic/SearchWithRandomExceptionsIT.java | 5 +-- .../basic/SearchWithRandomIOExceptionsIT.java | 7 +--- .../highlight/HighlighterSearchIT.java | 9 +--- .../opensearch/search/geo/GeoFilterIT.java | 7 +--- .../search/geo/GeoShapeIntegrationIT.java | 14 +++---- .../geo/LegacyGeoShapeIntegrationIT.java | 10 ++--- .../search/morelikethis/MoreLikeThisIT.java | 30 ++++--------- 
.../search/nested/SimpleNestedIT.java | 42 ++++++++----------- .../search/query/SimpleQueryStringIT.java | 14 +------ .../search/slice/SearchSliceIT.java | 5 +-- .../opensearch/search/sort/SimpleSortIT.java | 9 +--- .../indices/create/CreateIndexRequest.java | 13 ------ .../create/CreateIndexRequestBuilder.java | 6 +-- .../rollover/RolloverRequestBuilder.java | 6 --- .../create/CreateIndexRequestTests.java | 31 +++----------- .../index/mapper/MapperServiceTests.java | 5 +-- .../search/geo/GeoShapeQueryTests.java | 18 +++----- 32 files changed, 93 insertions(+), 271 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java index 19d1728a1fecd..72f34133067ee 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -42,8 +42,6 @@ import org.opensearch.common.Priority; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.OpenSearchExecutors; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.monitor.os.OsStats; import org.opensearch.node.NodeRoleSettings; import org.opensearch.test.OpenSearchIntegTestCase; @@ -276,19 +274,13 @@ public void testFieldTypes() { assertThat(response.getStatus(), Matchers.equalTo(ClusterHealthStatus.GREEN)); assertTrue(response.getIndicesStats().getMappings().getFieldTypeStats().isEmpty()); - client().admin() - .indices() - .prepareCreate("test1") - .addMapping(MapperService.SINGLE_MAPPING_NAME, "{\"properties\":{\"foo\":{\"type\": \"keyword\"}}}", XContentType.JSON) - .get(); + client().admin().indices().prepareCreate("test1").setMapping("{\"properties\":{\"foo\":{\"type\": \"keyword\"}}}").get(); 
client().admin() .indices() .prepareCreate("test2") - .addMapping( - MapperService.SINGLE_MAPPING_NAME, + .setMapping( "{\"properties\":{\"foo\":{\"type\": \"keyword\"},\"bar\":{\"properties\":{\"baz\":{\"type\":\"keyword\"}," - + "\"eggplant\":{\"type\":\"integer\"}}}}}", - XContentType.JSON + + "\"eggplant\":{\"type\":\"integer\"}}}}}" ) .get(); response = client().admin().cluster().prepareClusterStats().get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java index bbe8b616ad87e..ffc738ac98de5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java @@ -40,7 +40,6 @@ import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.IndexNotFoundException; import org.opensearch.test.OpenSearchIntegTestCase; @@ -64,12 +63,7 @@ public class GetIndexIT extends OpenSearchIntegTestCase { @Override protected void setupSuiteScopeCluster() throws Exception { - assertAcked( - prepareCreate("idx").addAlias(new Alias("alias_idx")) - .addMapping("type1", "{\"type1\":{}}", XContentType.JSON) - .setSettings(Settings.builder().put("number_of_shards", 1)) - .get() - ); + assertAcked(prepareCreate("idx").addAlias(new Alias("alias_idx")).setSettings(Settings.builder().put("number_of_shards", 1)).get()); ensureSearchable("idx"); createIndex("empty_idx"); ensureSearchable("idx", "empty_idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java index 9377fe284fce7..fc193163f75cc 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; import org.opensearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.discovery.MasterNotDiscoveredException; import org.opensearch.index.query.QueryBuilders; import org.opensearch.test.OpenSearchIntegTestCase; @@ -321,11 +320,9 @@ public void testAliasFilterValidation() { internalCluster().startDataOnlyNode(); assertAcked( - prepareCreate("test").addMapping( - "type1", - "{\"type1\" : {\"properties\" : {\"table_a\" : { \"type\" : \"nested\", " - + "\"properties\" : {\"field_a\" : { \"type\" : \"keyword\" },\"field_b\" :{ \"type\" : \"keyword\" }}}}}}", - XContentType.JSON + prepareCreate("test").setMapping( + "{\"properties\" : {\"table_a\" : { \"type\" : \"nested\", " + + "\"properties\" : {\"field_a\" : { \"type\" : \"keyword\" },\"field_b\" :{ \"type\" : \"keyword\" }}}}}" ) ); client().admin() diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java index 2138e24cc9b4c..24aff104ce837 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java @@ -57,7 +57,6 @@ import org.opensearch.common.Priority; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.mapper.MapperParsingException; import 
org.opensearch.indices.IndexClosedException; @@ -483,19 +482,15 @@ public void testRecoverMissingAnalyzer() throws Exception { prepareCreate("test").setSettings( Settings.builder().put("index.analysis.analyzer.test.tokenizer", "standard").put("index.number_of_shards", "1") ) - .addMapping( - "type1", + .setMapping( "{\n" - + " \"type1\": {\n" - + " \"properties\": {\n" - + " \"field1\": {\n" - + " \"type\": \"text\",\n" - + " \"analyzer\": \"test\"\n" - + " }\n" + + " \"properties\": {\n" + + " \"field1\": {\n" + + " \"type\": \"text\",\n" + + " \"analyzer\": \"test\"\n" + " }\n" + " }\n" - + " }}", - XContentType.JSON + + " }" ) .get(); logger.info("--> indexing a simple document"); diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java index 612abee7dbf5b..3c5f2828ff94f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java @@ -51,7 +51,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.Index; import org.opensearch.index.IndexService; @@ -115,16 +114,14 @@ public void testOneNodeRecoverFromGateway() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("appAccountIds") .field("type", "text") .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); client().prepareIndex("test") .setId("10990239") @@ -212,7 +209,6 @@ public void 
testSingleNodeNoFlush() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field") .field("type", "text") @@ -222,14 +218,13 @@ public void testSingleNodeNoFlush() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); // note: default replica settings are tied to #data nodes-1 which is 0 here. We can do with 1 in this test. int numberOfShards = numberOfShards(); assertAcked( prepareCreate("test").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards()).put(SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 1)) - ).addMapping("type1", mapping, XContentType.JSON) + ).setMapping(mapping) ); int value1Docs; diff --git a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java index 30cb18669ebbd..ec0b47ccd0ecf 100644 --- a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java @@ -291,7 +291,6 @@ public void testGetDocWithMultivaluedFields() throws Exception { String mapping1 = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field") .field("type", "text") @@ -299,9 +298,8 @@ public void testGetDocWithMultivaluedFields() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping1, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping1)); ensureGreen(); GetResponse response = client().prepareGet("test", "1").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java index f2cc3c289e8e4..f23e319a5e8d2 100644 
--- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java @@ -36,7 +36,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; @@ -81,7 +80,6 @@ public void testDynamicTemplateCopyTo() throws Exception { public void testDynamicObjectCopyTo() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("_doc") .startObject("properties") .startObject("foo") .field("type", "text") @@ -89,9 +87,8 @@ public void testDynamicObjectCopyTo() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(client().admin().indices().prepareCreate("test-idx").addMapping("_doc", mapping, XContentType.JSON)); + assertAcked(client().admin().indices().prepareCreate("test-idx").setMapping(mapping)); client().prepareIndex("test-idx").setId("1").setSource("foo", "bar").get(); client().admin().indices().prepareRefresh("test-idx").execute().actionGet(); SearchResponse response = client().prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("root.top.child", "bar")).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java index 7dc1933575ea3..e731b0074f04d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java @@ -34,7 +34,6 
@@ import org.opensearch.action.ActionListener; import org.opensearch.action.index.IndexResponse; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.QueryBuilders; import org.opensearch.test.OpenSearchIntegTestCase; @@ -49,14 +48,10 @@ import static org.hamcrest.Matchers.emptyIterable; public class ConcurrentDynamicTemplateIT extends OpenSearchIntegTestCase { - private final String mappingType = "test-mapping"; - // see #3544 public void testConcurrentDynamicMapping() throws Exception { final String fieldName = "field"; - final String mapping = "{ \"" - + mappingType - + "\": {" + final String mapping = "{ " + "\"dynamic_templates\": [" + "{ \"" + fieldName @@ -65,14 +60,14 @@ public void testConcurrentDynamicMapping() throws Exception { + "\"mapping\": {" + "\"type\": \"text\"," + "\"store\": true," - + "\"analyzer\": \"whitespace\" } } } ] } }"; + + "\"analyzer\": \"whitespace\" } } } ] }"; // The 'fieldNames' array is used to help with retrieval of index terms // after testing int iters = scaledRandomIntBetween(5, 15); for (int i = 0; i < iters; i++) { cluster().wipeIndices("test"); - assertAcked(prepareCreate("test").addMapping(mappingType, mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); int numDocs = scaledRandomIntBetween(10, 100); final CountDownLatch latch = new CountDownLatch(numDocs); final List throwable = new CopyOnWriteArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java index a325bbc62f8a8..0a29794add5a8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -145,7 +145,7 @@ public void testUpdateMappingWithoutType() { .indices() 
.prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)) - .addMapping("_doc", "{\"_doc\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\"}}}") .execute() .actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); @@ -196,7 +196,7 @@ public void testUpdateMappingWithConflicts() { .indices() .prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0)) - .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\"}}}") .execute() .actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); @@ -221,7 +221,7 @@ public void testUpdateMappingWithNormsConflicts() { client().admin() .indices() .prepareCreate("test") - .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": false }}}}", XContentType.JSON) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": false }}}") .execute() .actionGet(); try { @@ -248,11 +248,7 @@ public void testUpdateMappingNoChanges() { .indices() .prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0)) - .addMapping( - MapperService.SINGLE_MAPPING_NAME, - "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", - XContentType.JSON - ) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\"}}}") .execute() .actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 3d907bcaf3198..341c0a965f94e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -50,7 +50,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.MockEngineFactoryPlugin; import org.opensearch.index.query.QueryBuilders; import org.opensearch.indices.IndicesService; @@ -104,7 +103,6 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc .toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test-str") .field("type", "keyword") @@ -115,7 +113,6 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer"))) .endObject() // test-num .endObject() // properties - .endObject() // type .endObject() ); final double topLevelRate; @@ -149,7 +146,7 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc .indices() .prepareCreate("test") .setSettings(settings) - .addMapping("type", mapping, XContentType.JSON) + .setMapping(mapping) .execute() .actionGet(); final int numDocs; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java index b8baa35507892..ca1e1399f8fdc 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java @@ -47,7 +47,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.QueryBuilders; import org.opensearch.rest.RestStatus; @@ -305,17 +304,15 @@ public void testOpenCloseWithDocs() throws IOException, ExecutionException, Inte String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test") .field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", mapping, XContentType.JSON)); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping)); ensureGreen(); int docs = between(10, 100); IndexRequestBuilder[] builder = new IndexRequestBuilder[docs]; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java index 07c8471e360f6..c503dd9f83273 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java @@ -1004,11 +1004,9 @@ public void testMultiIndex() throws Exception { public void testCompletionFieldsParam() throws Exception { assertAcked( - prepareCreate("test1").addMapping( - "_doc", + prepareCreate("test1").setMapping( "{ \"properties\": { \"bar\": { \"type\": \"text\", \"fields\": { \"completion\": { \"type\": \"completion\" }}}" - + ",\"baz\": { \"type\": \"text\", \"fields\": { \"completion\": { \"type\": 
\"completion\" }}}}}", - XContentType.JSON + + ",\"baz\": { \"type\": \"text\", \"fields\": { \"completion\": { \"type\": \"completion\" }}}}}" ) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java index 99742166cda7f..a64e857f089f0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java @@ -36,8 +36,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilders; import org.opensearch.test.OpenSearchIntegTestCase; import org.mockito.internal.util.collections.Sets; @@ -63,11 +61,7 @@ public void testVariousPartitionSizes() throws Exception { .put("index.number_of_routing_shards", shards) .put("index.routing_partition_size", partitionSize) ) - .addMapping( - MapperService.SINGLE_MAPPING_NAME, - "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"_routing\":{\"required\":true}}}", - XContentType.JSON - ) + .setMapping("{\"_routing\":{\"required\":true}}") .execute() .actionGet(); ensureGreen(); @@ -101,11 +95,7 @@ public void testShrinking() throws Exception { .put("index.number_of_replicas", numberOfReplicas()) .put("index.routing_partition_size", partitionSize) ) - .addMapping( - MapperService.SINGLE_MAPPING_NAME, - "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"_routing\":{\"required\":true}}}", - XContentType.JSON - ) + .setMapping("{\"_routing\":{\"required\":true}}") .execute() .actionGet(); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java index 971afdd20e1fa..2c095857089e1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java @@ -41,7 +41,6 @@ import org.opensearch.common.time.DateFormatter; import org.opensearch.common.time.DateFormatters; import org.opensearch.common.time.DateMathParser; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.DateFieldMapper; import org.opensearch.index.query.MatchNoneQueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -1316,7 +1315,6 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception { public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception { String mappingJson = Strings.toString( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("date") .field("type", "date") @@ -1324,9 +1322,8 @@ public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception .endObject() .endObject() .endObject() - .endObject() ); - prepareCreate("idx2").addMapping("type", mappingJson, XContentType.JSON).get(); + prepareCreate("idx2").setMapping(mappingJson).get(); IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; for (int i = 0; i < reqs.length; i++) { reqs[i] = client().prepareIndex("idx2") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java index c21f78c5e942d..9b941860177bb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java @@ 
-36,7 +36,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.bucket.terms.Terms.Bucket; @@ -110,13 +109,7 @@ public void setupSuiteScopeCluster() throws Exception { ); } numRoutingValues = between(1, 40); - assertAcked( - prepareCreate("idx_with_routing").addMapping( - "type", - "{ \"type\" : { \"_routing\" : { \"required\" : true } } }", - XContentType.JSON - ) - ); + assertAcked(prepareCreate("idx_with_routing").setMapping("{ \"_routing\" : { \"required\" : true } }")); for (int i = 0; i < numDocs; i++) { builders.add( client().prepareIndex("idx_single_shard") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java index ed7f764c798e5..9efb07fc7e581 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java @@ -50,7 +50,6 @@ import org.opensearch.common.settings.Settings.Builder; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.MockEngineFactoryPlugin; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; @@ -85,14 +84,12 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test") 
.field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ); final double lowLevelRate; final double topLevelRate; @@ -121,7 +118,7 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe .put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate) .put(MockEngineSupport.WRAP_READER_RATIO.getKey(), 1.0d); logger.info("creating index: [test] using settings: [{}]", settings.build()); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setSettings(settings).setMapping(mapping)); ensureSearchable(); final int numDocs = between(10, 100); int numCreated = 0; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java index f35d07d6d513c..094ab8a19c88b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -46,7 +46,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; import org.opensearch.search.sort.SortOrder; @@ -73,14 +72,12 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test") .field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ); final double exceptionRate; final double exceptionOnOpenRate; @@ -108,7 +105,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, 
InterruptedExc if (createIndexWithoutErrors) { Settings.Builder settings = Settings.builder().put("index.number_of_replicas", numberOfReplicas()); logger.info("creating index: [test] using settings: [{}]", settings.build()); - client().admin().indices().prepareCreate("test").setSettings(settings).addMapping("type", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setSettings(settings).setMapping(mapping).get(); numInitialDocs = between(10, 100); ensureGreen(); for (int i = 0; i < numInitialDocs; i++) { @@ -134,7 +131,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc // we cannot expect that the index will be valid .put(MockFSDirectoryFactory.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.getKey(), exceptionOnOpenRate); logger.info("creating index: [test] using settings: [{}]", settings.build()); - client().admin().indices().prepareCreate("test").setSettings(settings).addMapping("type", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setSettings(settings).setMapping(mapping).get(); } ClusterHealthResponse clusterHealthResponse = client().admin() .cluster() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index de2926cadc032..f0fe5e4479b76 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -51,7 +51,6 @@ import org.opensearch.common.time.DateFormatter; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.analysis.AbstractIndexAnalyzerProvider; import 
org.opensearch.index.analysis.AnalyzerProvider; import org.opensearch.index.analysis.PreConfiguredTokenFilter; @@ -3292,7 +3291,6 @@ public void testKeywordFieldHighlighting() throws IOException { public void testACopyFieldWithNestedQuery() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "nested") @@ -3310,9 +3308,8 @@ public void testACopyFieldWithNestedQuery() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); + prepareCreate("test").setMapping(mapping).get(); client().prepareIndex("test") .setId("1") @@ -3424,7 +3421,6 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { public void testWithNestedQuery() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("text") .field("type", "text") @@ -3441,9 +3437,8 @@ public void testWithNestedQuery() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); + prepareCreate("test").setMapping(mapping).get(); client().prepareIndex("test") .setId("1") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java index d899451660cb7..8322c9704eecb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java @@ -214,7 +214,6 @@ public void testShapeRelations() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("polygon") .startObject("properties") .startObject("area") .field("type", "geo_shape") @@ -222,13 +221,9 @@ 
public void testShapeRelations() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - CreateIndexRequestBuilder mappingRequest = client().admin() - .indices() - .prepareCreate("shapes") - .addMapping("polygon", mapping, XContentType.JSON); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("shapes").setMapping(mapping); mappingRequest.get(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java index 2db5973a2aa85..7315155e39520 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java @@ -76,7 +76,6 @@ public void testOrientationPersistence() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -84,16 +83,14 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); // create index - assertAcked(prepareCreate(idxName).addMapping("shape", mapping, XContentType.JSON)); + assertAcked(prepareCreate(idxName).setMapping(mapping)); mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -101,10 +98,9 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate(idxName + "2").addMapping("shape", mapping, XContentType.JSON)); + assertAcked(prepareCreate(idxName + "2").setMapping(mapping)); ensureGreen(idxName, idxName 
+ "2"); internalCluster().fullRestart(); @@ -227,7 +223,7 @@ public void testIndexShapeRouting() throws Exception { + " }"; // create index - assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping).get()); ensureGreen(); String source = "{\n" @@ -265,10 +261,10 @@ public void testIndexPolygonDateLine() throws Exception { + " }"; // create index - assertAcked(client().admin().indices().prepareCreate("vector").addMapping("doc", mappingVector, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("vector").setMapping(mappingVector).get()); ensureGreen(); - assertAcked(client().admin().indices().prepareCreate("quad").addMapping("doc", mappingQuad, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("quad").setMapping(mappingQuad).get()); ensureGreen(); String source = "{\n" + " \"shape\" : \"POLYGON((179 0, -179 0, -179 2, 179 2, 179 0))\"" + "}"; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java index 479fd00e5e08b..28b00acd21479 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java @@ -68,7 +68,6 @@ public void testOrientationPersistence() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -77,16 +76,14 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); // create index - assertAcked(prepareCreate(idxName).addMapping("shape", mapping, 
XContentType.JSON)); + assertAcked(prepareCreate(idxName).setMapping(mapping)); mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -95,10 +92,9 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate(idxName + "2").addMapping("shape", mapping, XContentType.JSON)); + assertAcked(prepareCreate(idxName + "2").setMapping(mapping)); ensureGreen(idxName, idxName + "2"); internalCluster().fullRestart(); @@ -205,7 +201,7 @@ public void testIndexShapeRouting() throws Exception { + " }"; // create index - assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping).get()); ensureGreen(); String source = "{\n" diff --git a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java index f5a2b76b89213..7ffd648d06611 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java @@ -42,7 +42,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.MoreLikeThisQueryBuilder; import org.opensearch.index.query.MoreLikeThisQueryBuilder.Item; @@ -283,12 +282,9 @@ public void testMoreLikeThisWithAliases() throws Exception { public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { String indexName = "foo"; String 
aliasName = "foo_name"; - String typeName = "bar"; - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); - client().admin().indices().prepareCreate(indexName).addMapping(typeName, mapping, XContentType.JSON).get(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + client().admin().indices().prepareCreate(indexName).setMapping(mapping).get(); client().admin().indices().prepareAliases().addAlias(indexName, aliasName).get(); assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); @@ -309,10 +305,8 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { } public void testMoreLikeThisIssue2197() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); - client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).get(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + client().admin().indices().prepareCreate("foo").setMapping(mapping).get(); client().prepareIndex("foo") .setId("1") .setSource(jsonBuilder().startObject().startObject("foo").field("bar", "boz").endObject().endObject()) @@ -332,10 +326,8 @@ public void testMoreLikeThisIssue2197() throws Exception { // Issue #2489 public void testMoreLikeWithCustomRouting() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); - client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).get(); + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + client().admin().indices().prepareCreate("foo").setMapping(mapping).get(); ensureGreen(); client().prepareIndex("foo") @@ -354,14 +346,10 @@ public void testMoreLikeWithCustomRouting() throws Exception { // Issue #3039 public void testMoreLikeThisIssueRoutingNotSerialized() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); assertAcked( - prepareCreate("foo", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0)).addMapping( - "bar", - mapping, - XContentType.JSON + prepareCreate("foo", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0)).setMapping( + mapping ) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java index e4ad46c7599fe..c6c58e6fcb6a5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java @@ -719,25 +719,22 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { public void testNestedSortWithMultiLevelFiltering() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "{\n" - + " \"type1\": {\n" - + " \"properties\": {\n" - + " \"acl\": {\n" - + " \"type\": \"nested\",\n" - + " \"properties\": {\n" - + " \"access_id\": {\"type\": \"keyword\"},\n" - + " \"operation\": {\n" - + " \"type\": \"nested\",\n" - + " \"properties\": {\n" - + " 
\"name\": {\"type\": \"keyword\"},\n" - + " \"user\": {\n" - + " \"type\": \"nested\",\n" - + " \"properties\": {\n" - + " \"username\": {\"type\": \"keyword\"},\n" - + " \"id\": {\"type\": \"integer\"}\n" - + " }\n" + + " \"properties\": {\n" + + " \"acl\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"access_id\": {\"type\": \"keyword\"},\n" + + " \"operation\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"name\": {\"type\": \"keyword\"},\n" + + " \"user\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"username\": {\"type\": \"keyword\"},\n" + + " \"id\": {\"type\": \"integer\"}\n" + " }\n" + " }\n" + " }\n" @@ -745,8 +742,7 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { + " }\n" + " }\n" + " }\n" - + "}", - XContentType.JSON + + "}" ) ); ensureGreen(); @@ -965,8 +961,7 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { public void testLeakingSortValues() throws Exception { assertAcked( prepareCreate("test").setSettings(Settings.builder().put("number_of_shards", 1)) - .addMapping( - "test-type", + .setMapping( "{\n" + " \"dynamic\": \"strict\",\n" + " \"properties\": {\n" @@ -987,8 +982,7 @@ public void testLeakingSortValues() throws Exception { + " }\n" + " }\n" + " }\n" - + " }\n", - XContentType.JSON + + " }\n" ) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java index 6bd4eec37407f..c53eda63f155f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java @@ -379,7 +379,6 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - 
.startObject("type1") .startObject("properties") .startObject("location") .field("type", "text") @@ -387,13 +386,9 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In .endObject() .endObject() .endObject() - .endObject() ); - CreateIndexRequestBuilder mappingRequest = client().admin() - .indices() - .prepareCreate("test1") - .addMapping("type1", mapping, XContentType.JSON); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1").setMapping(mapping); mappingRequest.get(); indexRandom(true, client().prepareIndex("test1").setId("1").setSource("location", "Köln")); refresh(); @@ -431,7 +426,6 @@ public void testEmptySimpleQueryStringWithAnalysis() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("body") .field("type", "text") @@ -439,13 +433,9 @@ public void testEmptySimpleQueryStringWithAnalysis() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - CreateIndexRequestBuilder mappingRequest = client().admin() - .indices() - .prepareCreate("test1") - .addMapping("type1", mapping, XContentType.JSON); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1").setMapping(mapping); mappingRequest.get(); indexRandom(true, client().prepareIndex("test1").setId("1").setSource("body", "Some Text")); refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java index c4697e63cb4f7..9c735c42052e3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java @@ -43,7 +43,6 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentBuilder; import 
org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.Scroll; import org.opensearch.search.SearchException; import org.opensearch.search.SearchHit; @@ -67,7 +66,6 @@ private void setupIndex(int numDocs, int numberOfShards) throws IOException, Exe String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("invalid_random_kw") .field("type", "keyword") @@ -83,14 +81,13 @@ private void setupIndex(int numDocs, int numberOfShards) throws IOException, Exe .endObject() .endObject() .endObject() - .endObject() ); assertAcked( client().admin() .indices() .prepareCreate("test") .setSettings(Settings.builder().put("number_of_shards", numberOfShards).put("index.max_slices_per_scroll", 10000)) - .addMapping("type", mapping, XContentType.JSON) + .setMapping(mapping) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java index b4f511c3be123..70bb24532aa7d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java @@ -38,7 +38,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.geo.GeoUtils; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.plugins.Plugin; import org.opensearch.script.MockScriptPlugin; @@ -243,7 +242,6 @@ public void testSimpleSorts() throws Exception { public void testSortMinValueScript() throws IOException { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("lvalue") .field("type", "long") @@ -259,10 +257,9 @@ public void 
testSortMinValueScript() throws IOException { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen(); for (int i = 0; i < 10; i++) { @@ -359,7 +356,6 @@ public void testDocumentsWithNullValue() throws Exception { // be propagated to all nodes yet and sort operation fail when the sort field is not defined String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("id") .field("type", "keyword") @@ -369,9 +365,8 @@ public void testDocumentsWithNullValue() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen(); client().prepareIndex("test").setSource(jsonBuilder().startObject().field("id", "1").field("svalue", "aaa").endObject()).get(); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java index dd8fcdec1ddf8..8b38308d39c93 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java @@ -245,19 +245,6 @@ public CreateIndexRequest mapping(String mapping) { return this; } - /** - * Adds mapping that will be added when the index gets created. 
- * - * @param type The mapping type - * @param source The mapping source - * @param xContentType The content type of the source - * @deprecated types are being removed - */ - @Deprecated - public CreateIndexRequest mapping(String type, String source, XContentType xContentType) { - return mapping(type, new BytesArray(source), xContentType); - } - /** * Adds mapping that will be added when the index gets created. * diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java index 94fec1d2a08f2..77e48d079cb5c 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -111,13 +111,11 @@ public CreateIndexRequestBuilder setSettings(Map source) { /** * Adds mapping that will be added when the index gets created. 
* - * @param type The mapping type * @param source The mapping source - * @param xContentType The content type of the source */ @Deprecated - public CreateIndexRequestBuilder addMapping(String type, String source, XContentType xContentType) { - request.mapping(type, source, xContentType); + public CreateIndexRequestBuilder setMapping(String source) { + request.mapping(source); return this; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java index 6f631e7c086a9..a7af2f963d15b 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java @@ -38,7 +38,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.xcontent.XContentType; public class RolloverRequestBuilder extends MasterNodeOperationRequestBuilder { public RolloverRequestBuilder(OpenSearchClient client, RolloverAction action) { @@ -90,11 +89,6 @@ public RolloverRequestBuilder mapping(String type, Object... source) { return this; } - public RolloverRequestBuilder mapping(String type, String source, XContentType xContentType) { - this.request.getCreateIndexRequest().mapping(type, source, xContentType); - return this; - } - /** * Sets the number of shard copies that should be active for creation of the * new rollover index to return. 
Defaults to {@link ActiveShardCount#DEFAULT}, which will diff --git a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java index 472d389a23890..320db79428300 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -35,7 +35,6 @@ import org.opensearch.OpenSearchParseException; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.common.Strings; -import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.xcontent.LoggingDeprecationHandler; @@ -45,6 +44,7 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.index.mapper.MapperService; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; @@ -57,8 +57,10 @@ public class CreateIndexRequestTests extends OpenSearchTestCase { public void testSerialization() throws IOException { CreateIndexRequest request = new CreateIndexRequest("foo"); - String mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("my_type").endObject().endObject()); - request.mapping("my_type", mapping, XContentType.JSON); + String mapping = Strings.toString( + JsonXContent.contentBuilder().startObject().startObject(MapperService.SINGLE_MAPPING_NAME).endObject().endObject() + ); + request.mapping(mapping); try (BytesStreamOutput output = new BytesStreamOutput()) { request.writeTo(output); @@ -135,29 +137,6 @@ public void testMappingKeyedByType() throws IOException { request2.mapping("type1", builder); assertEquals(request1.mappings(), 
request2.mappings()); } - { - request1 = new CreateIndexRequest("foo"); - request2 = new CreateIndexRequest("bar"); - String nakedMapping = "{\"properties\": {\"foo\": {\"type\": \"integer\"}}}"; - request1.mapping("type2", nakedMapping, XContentType.JSON); - request2.mapping("type2", "{\"type2\": " + nakedMapping + "}", XContentType.JSON); - assertEquals(request1.mappings(), request2.mappings()); - } - { - request1 = new CreateIndexRequest("foo"); - request2 = new CreateIndexRequest("bar"); - Map nakedMapping = MapBuilder.newMapBuilder() - .put( - "properties", - MapBuilder.newMapBuilder() - .put("bar", MapBuilder.newMapBuilder().put("type", "scaled_float").put("scaling_factor", 100).map()) - .map() - ) - .map(); - request1.mapping("type3", nakedMapping); - request2.mapping("type3", MapBuilder.newMapBuilder().put("type3", nakedMapping).map()); - assertEquals(request1.mappings(), request2.mappings()); - } } public void testSettingsType() throws IOException { diff --git a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java index eae52efa391a1..b58c0bf69c298 100644 --- a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java @@ -40,7 +40,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.Environment; import org.opensearch.index.IndexService; import org.opensearch.index.IndexSettings; @@ -175,7 +174,7 @@ public void testPartitionedConstraints() { client().admin() .indices() .prepareCreate("test-index") - .addMapping("type", "{\"type\":{}}", XContentType.JSON) + .setMapping("{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{}}") .setSettings(Settings.builder().put("index.number_of_shards", 
4).put("index.routing_partition_size", 2)) .execute() .actionGet(); @@ -187,7 +186,7 @@ public void testPartitionedConstraints() { client().admin() .indices() .prepareCreate("test-index") - .addMapping("type", "{\"type\":{\"_routing\":{\"required\":true}}}", XContentType.JSON) + .setMapping("{\"_routing\":{\"required\":true}}") .setSettings(Settings.builder().put("index.number_of_shards", 4).put("index.routing_partition_size", 2)) .execute() .actionGet() diff --git a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java index eb8cc7e6113e1..1722cb564e231 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java @@ -432,16 +432,14 @@ public void testGeometryCollectionRelations() throws Exception { public void testEdgeCases() throws Exception { XContentBuilder xcb = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("geo") .field("type", "geo_shape") .endObject() .endObject() - .endObject() .endObject(); String mapping = Strings.toString(xcb); - client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).get(); ensureGreen(); client().prepareIndex("test") @@ -629,7 +627,6 @@ public void testPointsOnly() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -640,10 +637,9 @@ public void testPointsOnly() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - client().admin().indices().prepareCreate("geo_points_only").addMapping("type1", mapping, XContentType.JSON).get(); + 
client().admin().indices().prepareCreate("geo_points_only").setMapping(mapping).get(); ensureGreen(); ShapeBuilder shape = RandomShapeGenerator.createShape(random()); @@ -669,7 +665,6 @@ public void testPointsOnlyExplicit() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("geo") .field("type", "geo_shape") @@ -680,10 +675,9 @@ public void testPointsOnlyExplicit() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - client().admin().indices().prepareCreate("geo_points_only").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("geo_points_only").setMapping(mapping).get(); ensureGreen(); // MULTIPOINT @@ -710,7 +704,7 @@ public void testPointsOnlyExplicit() throws Exception { public void testIndexedShapeReference() throws Exception { String mapping = Strings.toString(createDefaultMapping()); - client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).get(); createIndex("shapes"); ensureGreen(); @@ -907,7 +901,7 @@ public void testShapeFilterWithDefinedGeoCollection() throws Exception { public void testDistanceQuery() throws Exception { String mapping = Strings.toString(createRandomMapping()); - client().admin().indices().prepareCreate("test_distance").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test_distance").setMapping(mapping).get(); ensureGreen(); CircleBuilder circleBuilder = new CircleBuilder().center(new Coordinate(1, 0)).radius(350, DistanceUnit.KILOMETERS); @@ -950,7 +944,7 @@ public void testDistanceQuery() throws Exception { public void testIndexRectangleSpanningDateLine() throws Exception { String mapping = Strings.toString(createRandomMapping()); - 
client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).get(); ensureGreen(); EnvelopeBuilder envelopeBuilder = new EnvelopeBuilder(new Coordinate(178, 10), new Coordinate(-178, -10)); From 12dd5d76b5c85b7abea05a9c329183ddabb5f602 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Tue, 15 Mar 2022 11:43:20 -0400 Subject: [PATCH 08/12] repository-azure: revert the fix for https://github.com/opensearch-project/OpenSearch/issues/1734 once upstream solution is available (#2446) Signed-off-by: Andriy Redko --- plugins/repository-azure/build.gradle | 14 +- .../licenses/azure-core-1.22.0.jar.sha1 | 1 - .../licenses/azure-core-1.26.0.jar.sha1 | 1 + .../azure-core-http-netty-1.11.7.jar.sha1 | 1 - .../azure-core-http-netty-1.11.8.jar.sha1 | 1 + .../azure-storage-blob-12.14.1.jar.sha1 | 1 - .../azure-storage-blob-12.14.4.jar.sha1 | 1 + .../azure-storage-common-12.14.3.jar.sha1 | 1 - .../azure-storage-common-12.15.0.jar.sha1 | 1 + .../licenses/reactor-netty-1.0.13.jar.sha1 | 1 - .../licenses/reactor-netty-1.0.16.jar.sha1 | 1 + .../reactor-netty-core-1.0.13.jar.sha1 | 1 - .../reactor-netty-core-1.0.16.jar.sha1 | 1 + .../reactor-netty-http-1.0.13.jar.sha1 | 1 - .../reactor-netty-http-1.0.16.jar.sha1 | 1 + .../repositories/azure/AzureBlobStore.java | 181 ++++++------------ 16 files changed, 73 insertions(+), 136 deletions(-) delete mode 100644 plugins/repository-azure/licenses/azure-core-1.22.0.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/azure-core-http-netty-1.11.7.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/azure-storage-blob-12.14.1.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 delete mode 100644 
plugins/repository-azure/licenses/azure-storage-common-12.14.3.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-storage-common-12.15.0.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/reactor-netty-1.0.13.jar.sha1 create mode 100644 plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/reactor-netty-core-1.0.13.jar.sha1 create mode 100644 plugins/repository-azure/licenses/reactor-netty-core-1.0.16.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/reactor-netty-http-1.0.13.jar.sha1 create mode 100644 plugins/repository-azure/licenses/reactor-netty-http-1.0.16.jar.sha1 diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 3dc089ef8acb7..60fb99f459454 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -44,9 +44,9 @@ opensearchplugin { } dependencies { - api 'com.azure:azure-core:1.22.0' - api 'com.azure:azure-storage-common:12.14.3' - api 'com.azure:azure-core-http-netty:1.11.7' + api 'com.azure:azure-core:1.26.0' + api 'com.azure:azure-storage-common:12.15.0' + api 'com.azure:azure-core-http-netty:1.11.8' api "io.netty:netty-codec-dns:${versions.netty}" api "io.netty:netty-codec-socks:${versions.netty}" api "io.netty:netty-codec-http2:${versions.netty}" @@ -54,12 +54,12 @@ dependencies { api "io.netty:netty-resolver-dns:${versions.netty}" api "io.netty:netty-transport-native-unix-common:${versions.netty}" implementation project(':modules:transport-netty4') - api 'com.azure:azure-storage-blob:12.14.1' + api 'com.azure:azure-storage-blob:12.14.4' api 'org.reactivestreams:reactive-streams:1.0.3' api 'io.projectreactor:reactor-core:3.4.15' - api 'io.projectreactor.netty:reactor-netty:1.0.13' - api 'io.projectreactor.netty:reactor-netty-core:1.0.13' - api 'io.projectreactor.netty:reactor-netty-http:1.0.13' + api 'io.projectreactor.netty:reactor-netty:1.0.16' + api 
'io.projectreactor.netty:reactor-netty-core:1.0.16' + api 'io.projectreactor.netty:reactor-netty-http:1.0.16' api "org.slf4j:slf4j-api:${versions.slf4j}" api "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}" diff --git a/plugins/repository-azure/licenses/azure-core-1.22.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.22.0.jar.sha1 deleted file mode 100644 index f57b83e5d9715..0000000000000 --- a/plugins/repository-azure/licenses/azure-core-1.22.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -194b21b804c20c85f7d2a6199280075f6747e188 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 new file mode 100644 index 0000000000000..693c6a721959c --- /dev/null +++ b/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 @@ -0,0 +1 @@ +461b89dcf8948a0c4a97d4f1d876f778d0cac7aa \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.7.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.7.jar.sha1 deleted file mode 100644 index 25db85393f2af..0000000000000 --- a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.7.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c6b14fcca3e75acc8dbe07ac101afd05d48a1647 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 new file mode 100644 index 0000000000000..df7d7ae4ce285 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 @@ -0,0 +1 @@ +0ea66d4531fb41cb3b5ab55e2e7b7f301e7f8503 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-blob-12.14.1.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-blob-12.14.1.jar.sha1 deleted file mode 100644 index 
d9c6f462089e3..0000000000000 --- a/plugins/repository-azure/licenses/azure-storage-blob-12.14.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -384763aef32d779ee22ef3faa03049fee7e0f6de \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 new file mode 100644 index 0000000000000..5333f8fa90ada --- /dev/null +++ b/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 @@ -0,0 +1 @@ +2b92020693d09e4980b96d278e8038a1087afea0 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-common-12.14.3.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-common-12.14.3.jar.sha1 deleted file mode 100644 index b7cb4342c014c..0000000000000 --- a/plugins/repository-azure/licenses/azure-storage-common-12.14.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e8d6258aa8bf1594980c01294e60de74d13a815f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-common-12.15.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-common-12.15.0.jar.sha1 new file mode 100644 index 0000000000000..1f3adfc161c7f --- /dev/null +++ b/plugins/repository-azure/licenses/azure-storage-common-12.15.0.jar.sha1 @@ -0,0 +1 @@ +4d63ce8bbd20379c5e5262b1204ceac7b31a7743 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-1.0.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-1.0.13.jar.sha1 deleted file mode 100644 index be6cfc229b9b2..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-1.0.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cf216a9ba6b50210664761add9db744c9c3f51d8 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 new file mode 100644 index 0000000000000..582380e449a1d --- /dev/null +++ 
b/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 @@ -0,0 +1 @@ +d90829f6127966b0c35c4a3e8e23ca9ed29cd8a5 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.0.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.0.13.jar.sha1 deleted file mode 100644 index 8f81861f48dde..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-core-1.0.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a67949c5946dd66c7ab0a3b059213c23345c32b1 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.0.16.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.0.16.jar.sha1 new file mode 100644 index 0000000000000..0d1a0cb20c80f --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-core-1.0.16.jar.sha1 @@ -0,0 +1 @@ +8f842a912677f2bc614ff60fb9e786d4fa429c34 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.0.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.0.13.jar.sha1 deleted file mode 100644 index e6b4cb0b9a4e8..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-http-1.0.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -de7a38101098db9438c18fdd09acc5b79a2ec02a \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.0.16.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.0.16.jar.sha1 new file mode 100644 index 0000000000000..d737315b06b62 --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-http-1.0.16.jar.sha1 @@ -0,0 +1 @@ +93edb9a1dc774d843551a616e0f316e11ffa81ed \ No newline at end of file diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java index 753c902a6eb01..b540dd83c95a2 100644 --- 
a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java @@ -35,7 +35,6 @@ import com.azure.core.http.HttpMethod; import com.azure.core.http.HttpRequest; import com.azure.core.http.HttpResponse; -import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.storage.blob.BlobClient; @@ -52,7 +51,6 @@ import com.azure.storage.blob.options.BlobParallelUploadOptions; import com.azure.storage.common.implementation.Constants; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.util.Throwables; @@ -84,7 +82,6 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; -import java.util.Optional; import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicLong; @@ -220,71 +217,50 @@ public DeleteResult deleteBlobDirectory(String path, Executor executor) throws U final ListBlobsOptions listBlobsOptions = new ListBlobsOptions().setPrefix(path); SocketAccess.doPrivilegedVoidException(() -> { - String continuationToken = null; - - do { - // Fetch one page at a time, others are going to be fetched by continuation token - // TODO: reconsider reverting to simplified approach once https://github.com/Azure/azure-sdk-for-java/issues/26064 - // gets addressed. 
- final Optional> pageOpt = blobContainer.listBlobs(listBlobsOptions, timeout()) - .streamByPage(continuationToken) - .findFirst(); - - if (!pageOpt.isPresent()) { - // No more pages, should never happen - break; - } - - final PagedResponse page = pageOpt.get(); - for (final BlobItem blobItem : page.getValue()) { - // Skipping prefixes as those are not deletable and should not be there - assert (blobItem.isPrefix() == null || !blobItem.isPrefix()) : "Only blobs (not prefixes) are expected"; - - outstanding.incrementAndGet(); - executor.execute(new AbstractRunnable() { - @Override - protected void doRun() throws Exception { - final long len = blobItem.getProperties().getContentLength(); - - final BlobClient azureBlob = blobContainer.getBlobClient(blobItem.getName()); - logger.trace( - () -> new ParameterizedMessage("container [{}]: blob [{}] found. removing.", container, blobItem.getName()) - ); - final Response response = azureBlob.deleteWithResponse(null, null, timeout(), client.v2().get()); - logger.trace( - () -> new ParameterizedMessage( - "container [{}]: blob [{}] deleted status [{}].", - container, - blobItem.getName(), - response.getStatusCode() - ) - ); - - blobsDeleted.incrementAndGet(); - if (len >= 0) { - bytesDeleted.addAndGet(len); - } + for (final BlobItem blobItem : blobContainer.listBlobs(listBlobsOptions, timeout())) { + // Skipping prefixes as those are not deletable and should not be there + assert (blobItem.isPrefix() == null || !blobItem.isPrefix()) : "Only blobs (not prefixes) are expected"; + + outstanding.incrementAndGet(); + executor.execute(new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + final long len = blobItem.getProperties().getContentLength(); + + final BlobClient azureBlob = blobContainer.getBlobClient(blobItem.getName()); + logger.trace( + () -> new ParameterizedMessage("container [{}]: blob [{}] found. 
removing.", container, blobItem.getName()) + ); + final Response response = azureBlob.deleteWithResponse(null, null, timeout(), client.v2().get()); + logger.trace( + () -> new ParameterizedMessage( + "container [{}]: blob [{}] deleted status [{}].", + container, + blobItem.getName(), + response.getStatusCode() + ) + ); + + blobsDeleted.incrementAndGet(); + if (len >= 0) { + bytesDeleted.addAndGet(len); } + } - @Override - public void onFailure(Exception e) { - exceptions.add(e); - } + @Override + public void onFailure(Exception e) { + exceptions.add(e); + } - @Override - public void onAfter() { - if (outstanding.decrementAndGet() == 0) { - result.onResponse(null); - } + @Override + public void onAfter() { + if (outstanding.decrementAndGet() == 0) { + result.onResponse(null); } - }); - } - - // Fetch next continuation token - continuationToken = page.getContinuationToken(); - } while (StringUtils.isNotBlank(continuationToken)); + } + }); + } }); - if (outstanding.decrementAndGet() == 0) { result.onResponse(null); } @@ -325,39 +301,19 @@ public Map listBlobsByPrefix(String keyPath, String prefix .setPrefix(keyPath + (prefix == null ? 
"" : prefix)); SocketAccess.doPrivilegedVoidException(() -> { - String continuationToken = null; - - do { - // Fetch one page at a time, others are going to be fetched by continuation token - // TODO: reconsider reverting to simplified approach once https://github.com/Azure/azure-sdk-for-java/issues/26064 - // gets addressed - final Optional> pageOpt = blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout()) - .streamByPage(continuationToken) - .findFirst(); - - if (!pageOpt.isPresent()) { - // No more pages, should never happen - break; + for (final BlobItem blobItem : blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout())) { + // Skipping over the prefixes, only look for the blobs + if (blobItem.isPrefix() != null && blobItem.isPrefix()) { + continue; } - final PagedResponse page = pageOpt.get(); - for (final BlobItem blobItem : page.getValue()) { - // Skipping over the prefixes, only look for the blobs - if (blobItem.isPrefix() != null && blobItem.isPrefix()) { - continue; - } + final String name = getBlobName(blobItem.getName(), container, keyPath); + logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); - final String name = getBlobName(blobItem.getName(), container, keyPath); - logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); - - final BlobItemProperties properties = blobItem.getProperties(); - logger.trace(() -> new ParameterizedMessage("blob name [{}], size [{}]", name, properties.getContentLength())); - blobsBuilder.put(name, new PlainBlobMetadata(name, properties.getContentLength())); - } - - // Fetch next continuation token - continuationToken = page.getContinuationToken(); - } while (StringUtils.isNotBlank(continuationToken)); + final BlobItemProperties properties = blobItem.getProperties(); + logger.trace(() -> new ParameterizedMessage("blob name [{}], size [{}]", name, properties.getContentLength())); + blobsBuilder.put(name, new PlainBlobMetadata(name, 
properties.getContentLength())); + } }); return MapBuilder.newMapBuilder(blobsBuilder).immutableMap(); @@ -373,36 +329,17 @@ public Map children(BlobPath path) throws URISyntaxExcept .setPrefix(keyPath); SocketAccess.doPrivilegedVoidException(() -> { - String continuationToken = null; - - do { - // Fetch one page at a time, others are going to be fetched by continuation token - // TODO: reconsider reverting to simplified approach once https://github.com/Azure/azure-sdk-for-java/issues/26064 - // gets addressed - final Optional> pageOpt = blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout()) - .streamByPage(continuationToken) - .findFirst(); - - if (!pageOpt.isPresent()) { - // No more pages, should never happen - break; - } - - final PagedResponse page = pageOpt.get(); - for (final BlobItem blobItem : page.getValue()) { - // Skipping over the blobs, only look for prefixes - if (blobItem.isPrefix() != null && blobItem.isPrefix()) { - // Expecting name in the form /container/keyPath.* and we want to strip off the /container/ - // this requires 1 + container.length() + 1, with each 1 corresponding to one of the /. - // Lastly, we add the length of keyPath to the offset to strip this container's path. - final String name = getBlobName(blobItem.getName(), container, keyPath).replaceAll("/$", ""); - logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); - blobsBuilder.add(name); - } + for (final BlobItem blobItem : blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout())) { + // Skipping over the blobs, only look for prefixes + if (blobItem.isPrefix() != null && blobItem.isPrefix()) { + // Expecting name in the form /container/keyPath.* and we want to strip off the /container/ + // this requires 1 + container.length() + 1, with each 1 corresponding to one of the /. + // Lastly, we add the length of keyPath to the offset to strip this container's path. 
+ final String name = getBlobName(blobItem.getName(), container, keyPath).replaceAll("/$", ""); + logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); + blobsBuilder.add(name); } - // Fetch next continuation token - continuationToken = page.getContinuationToken(); - } while (StringUtils.isNotBlank(continuationToken)); + } }); return Collections.unmodifiableMap( From b69dc335ad4cfaf421abaca9e921fcf08c1d45d8 Mon Sep 17 00:00:00 2001 From: "Kyle J. Davis" Date: Tue, 15 Mar 2022 09:56:00 -0600 Subject: [PATCH 09/12] Add trademark notice (#2473) * adds notice to README Signed-off-by: Kyle Davis * adds trademark heading Signed-off-by: Kyle Davis --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index e4bdb4d85b632..ec9cae6e432b2 100644 --- a/README.md +++ b/README.md @@ -39,3 +39,9 @@ This project is licensed under the [Apache v2.0 License](LICENSE.txt). ## Copyright Copyright OpenSearch Contributors. See [NOTICE](NOTICE.txt) for details. + +## Trademark + +OpenSearch is a registered trademark of Amazon Web Services. + +OpenSearch includes certain Apache-licensed Elasticsearch code from Elasticsearch B.V. and other source code. Elasticsearch B.V. is not the source of that other source code. ELASTICSEARCH is a registered trademark of Elasticsearch B.V. \ No newline at end of file From 757abdb9a0d22682cac537b32d19e22ee47f0e33 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 15 Mar 2022 13:13:54 -0500 Subject: [PATCH 10/12] [Refactor] LuceneChangesSnapshot to use accurate ops history (#2452) Improves the LuceneChangesSnapshot to get an accurate count of recovery operations using sort by sequence number optimization. 
Signed-off-by: Nicholas Walter Knize --- .../20_missing_field.yml | 4 ++ .../opensearch/index/shard/IndexShardIT.java | 2 +- .../org/opensearch/index/engine/Engine.java | 18 ++++++- .../index/engine/InternalEngine.java | 26 +++++++++- .../index/engine/LuceneChangesSnapshot.java | 50 ++++++++++++++----- .../index/engine/ReadOnlyEngine.java | 15 +++++- .../opensearch/index/shard/IndexShard.java | 27 ++++++++-- .../index/shard/PrimaryReplicaSyncer.java | 2 +- .../recovery/RecoverySourceHandler.java | 27 ++++++---- .../indices/recovery/RecoveryTarget.java | 10 ++-- .../index/engine/InternalEngineTests.java | 8 ++- .../engine/LuceneChangesSnapshotTests.java | 32 +++++++----- .../IndexLevelReplicationTests.java | 6 +-- .../indices/recovery/RecoveryTests.java | 2 +- .../index/engine/EngineTestCase.java | 4 +- 15 files changed, 174 insertions(+), 59 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml index 2f15334f882a9..a36f807e63e0e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml @@ -1,5 +1,9 @@ --- "Return empty object if field doesn't exist, but index does": + - skip: + version: "all" + reason: "AwaitsFix https://github.com/opensearch-project/OpenSearch/issues/2440" + - do: indices.create: index: test_index diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java index 0e915577dc467..efc522a1f9741 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java @@ -764,7 +764,7 @@ public void 
testShardChangesWithDefaultDocType() throws Exception { } IndexShard shard = indexService.getShard(0); try ( - Translog.Snapshot luceneSnapshot = shard.newChangesSnapshot("test", 0, numOps - 1, true); + Translog.Snapshot luceneSnapshot = shard.newChangesSnapshot("test", 0, numOps - 1, true, randomBoolean()); Translog.Snapshot translogSnapshot = getTranslog(shard).newSnapshot() ) { List opsFromLucene = TestTranslog.drainSnapshot(luceneSnapshot, true); diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index 7cf7b3245c0e5..825d71d6d1024 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -735,8 +735,22 @@ public enum SearcherScope { * Creates a new history snapshot from Lucene for reading operations whose seqno in the requesting seqno range (both inclusive). * This feature requires soft-deletes enabled. If soft-deletes are disabled, this method will throw an {@link IllegalStateException}. 
*/ - public abstract Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) - throws IOException; + public abstract Translog.Snapshot newChangesSnapshot( + String source, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean accurateCount + ) throws IOException; + + /** + * Counts the number of history operations in the given sequence number range + * @param source source of the request + * @param fromSeqNo from sequence number; included + * @param toSeqNumber to sequence number; included + * @return number of history operations + */ + public abstract int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNumber) throws IOException; public abstract boolean hasCompleteOperationHistory(String reason, long startingSeqNo); diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 438bb0b290b9c..1c5f06e85cb88 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -2772,7 +2772,13 @@ long getNumDocUpdates() { } @Override - public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException { + public Translog.Snapshot newChangesSnapshot( + String source, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean accurateCount + ) throws IOException { ensureOpen(); refreshIfNeeded(source, toSeqNo); Searcher searcher = acquireSearcher(source, SearcherScope.INTERNAL); @@ -2782,7 +2788,8 @@ public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long LuceneChangesSnapshot.DEFAULT_BATCH_SIZE, fromSeqNo, toSeqNo, - requiredFullRange + requiredFullRange, + accurateCount ); searcher = null; return snapshot; @@ -2798,6 +2805,21 @@ public Translog.Snapshot newChangesSnapshot(String 
source, long fromSeqNo, long } } + public int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNo) throws IOException { + ensureOpen(); + refreshIfNeeded(source, toSeqNo); + try (Searcher s = acquireSearcher(source, SearcherScope.INTERNAL)) { + return LuceneChangesSnapshot.countNumberOfHistoryOperations(s, fromSeqNo, toSeqNo); + } catch (IOException e) { + try { + maybeFailEngine(source, e); + } catch (Exception innerException) { + e.addSuppressed(innerException); + } + throw e; + } + } + public boolean hasCompleteOperationHistory(String reason, long startingSeqNo) { return getMinRetainedSeqNo() <= startingSeqNo; } diff --git a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java index d640cf1468ec3..ae1dc9e647073 100644 --- a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java @@ -38,16 +38,19 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.util.ArrayUtil; +import org.opensearch.Version; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.lucene.Lucene; +import org.opensearch.common.lucene.search.Queries; import org.opensearch.core.internal.io.IOUtils; import org.opensearch.index.fieldvisitor.FieldsVisitor; import org.opensearch.index.mapper.SeqNoFieldMapper; @@ -88,8 +91,14 @@ final class 
LuceneChangesSnapshot implements Translog.Snapshot { * @param toSeqNo the maximum requesting seq# - inclusive * @param requiredFullRange if true, the snapshot will strictly check for the existence of operations between fromSeqNo and toSeqNo */ - LuceneChangesSnapshot(Engine.Searcher engineSearcher, int searchBatchSize, long fromSeqNo, long toSeqNo, boolean requiredFullRange) - throws IOException { + LuceneChangesSnapshot( + Engine.Searcher engineSearcher, + int searchBatchSize, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean accurateCount + ) throws IOException { if (fromSeqNo < 0 || toSeqNo < 0 || fromSeqNo > toSeqNo) { throw new IllegalArgumentException("Invalid range; from_seqno [" + fromSeqNo + "], to_seqno [" + toSeqNo + "]"); } @@ -111,7 +120,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { this.indexSearcher = new IndexSearcher(Lucene.wrapAllDocsLive(engineSearcher.getDirectoryReader())); this.indexSearcher.setQueryCache(null); this.parallelArray = new ParallelArray(this.searchBatchSize); - final TopDocs topDocs = searchOperations(null); + final TopDocs topDocs = searchOperations(null, accurateCount); this.totalHits = Math.toIntExact(topDocs.totalHits.value); this.scoreDocs = topDocs.scoreDocs; fillParallelArray(scoreDocs, parallelArray); @@ -187,7 +196,7 @@ private int nextDocIndex() throws IOException { // we have processed all docs in the current search - fetch the next batch if (docIndex == scoreDocs.length && docIndex > 0) { final ScoreDoc prev = scoreDocs[scoreDocs.length - 1]; - scoreDocs = searchOperations(prev).scoreDocs; + scoreDocs = searchOperations((FieldDoc) prev, false).scoreDocs; fillParallelArray(scoreDocs, parallelArray); docIndex = 0; } @@ -236,16 +245,31 @@ private void fillParallelArray(ScoreDoc[] scoreDocs, ParallelArray parallelArray } } - private TopDocs searchOperations(ScoreDoc after) throws IOException { - final Query rangeQuery = new BooleanQuery.Builder().add( - 
LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, Math.max(fromSeqNo, lastSeenSeqNo), toSeqNo), - BooleanClause.Occur.MUST - ) - // exclude non-root nested documents - .add(new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME), BooleanClause.Occur.MUST) + private static Query operationsRangeQuery(long fromSeqNo, long toSeqNo) { + return new BooleanQuery.Builder().add(LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, fromSeqNo, toSeqNo), BooleanClause.Occur.MUST) + .add(Queries.newNonNestedFilter(Version.CURRENT), BooleanClause.Occur.MUST) // exclude non-root nested docs .build(); + } + + static int countNumberOfHistoryOperations(Engine.Searcher searcher, long fromSeqNo, long toSeqNo) throws IOException { + if (fromSeqNo > toSeqNo || fromSeqNo < 0 || toSeqNo < 0) { + throw new IllegalArgumentException("Invalid sequence range; fromSeqNo [" + fromSeqNo + "] toSeqNo [" + toSeqNo + "]"); + } + IndexSearcher indexSearcher = new IndexSearcher(Lucene.wrapAllDocsLive(searcher.getDirectoryReader())); + return indexSearcher.count(operationsRangeQuery(fromSeqNo, toSeqNo)); + } + + private TopDocs searchOperations(FieldDoc after, boolean accurate) throws IOException { + final Query rangeQuery = operationsRangeQuery(Math.max(fromSeqNo, lastSeenSeqNo), toSeqNo); final Sort sortedBySeqNo = new Sort(new SortField(SeqNoFieldMapper.NAME, SortField.Type.LONG)); - return indexSearcher.searchAfter(after, rangeQuery, searchBatchSize, sortedBySeqNo); + final TopFieldCollector topFieldCollector = TopFieldCollector.create( + sortedBySeqNo, + searchBatchSize, + after, + accurate ? 
Integer.MAX_VALUE : 0 + ); + indexSearcher.search(rangeQuery, topFieldCollector); + return topFieldCollector.topDocs(); } private Translog.Operation readDocAsOp(int docIndex) throws IOException { diff --git a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java index 32d6b9b98d169..43fe10c217270 100644 --- a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java @@ -325,10 +325,23 @@ public Closeable acquireHistoryRetentionLock() { } @Override - public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) { + public Translog.Snapshot newChangesSnapshot( + String source, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean accurateCount + ) { return newEmptySnapshot(); } + @Override + public int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNo) throws IOException { + try (Translog.Snapshot snapshot = newChangesSnapshot(source, fromSeqNo, toSeqNo, false, true)) { + return snapshot.totalOperations(); + } + } + public boolean hasCompleteOperationHistory(String reason, long startingSeqNo) { // we can do operation-based recovery if we don't have to replay any operation. 
return startingSeqNo > seqNoStats.getMaxSeqNo(); diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index ad370051c53ac..f2630ad05b488 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -2231,13 +2231,13 @@ public Closeable acquireHistoryRetentionLock() { } /** - * * Creates a new history snapshot for reading operations since * the provided starting seqno (inclusive) and ending seqno (inclusive) * The returned snapshot can be retrieved from either Lucene index or translog files. */ - public Translog.Snapshot getHistoryOperations(String reason, long startingSeqNo, long endSeqNo) throws IOException { - return getEngine().newChangesSnapshot(reason, startingSeqNo, endSeqNo, true); + public Translog.Snapshot getHistoryOperations(String reason, long startingSeqNo, long endSeqNo, boolean accurateCount) + throws IOException { + return getEngine().newChangesSnapshot(reason, startingSeqNo, endSeqNo, true, accurateCount); } /** @@ -2257,6 +2257,17 @@ public long getMinRetainedSeqNo() { return getEngine().getMinRetainedSeqNo(); } + /** + * Counts the number of history operations within the provided sequence numbers + * @param source source of the requester (e.g., peer-recovery) + * @param fromSeqNo from sequence number, included + * @param toSeqNo to sequence number, included + * @return number of history operations in the sequence number range + */ + public int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNo) throws IOException { + return getEngine().countNumberOfHistoryOperations(source, fromSeqNo, toSeqNo); + } + /** * Creates a new changes snapshot for reading operations whose seq_no are between {@code fromSeqNo}(inclusive) * and {@code toSeqNo}(inclusive). The caller has to close the returned snapshot after finishing the reading. 
@@ -2268,8 +2279,14 @@ public long getMinRetainedSeqNo() { * if any operation between {@code fromSeqNo} and {@code toSeqNo} is missing. * This parameter should be only enabled when the entire requesting range is below the global checkpoint. */ - public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException { - return getEngine().newChangesSnapshot(source, fromSeqNo, toSeqNo, requiredFullRange); + public Translog.Snapshot newChangesSnapshot( + String source, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean accurateCount + ) throws IOException { + return getEngine().newChangesSnapshot(source, fromSeqNo, toSeqNo, requiredFullRange, accurateCount); } public List segments(boolean verbose) { diff --git a/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java index bbdf948af5c32..726d2925177fa 100644 --- a/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java @@ -104,7 +104,7 @@ public void resync(final IndexShard indexShard, final ActionListener // Wrap translog snapshot to make it synchronized as it is accessed by different threads through SnapshotSender. 
// Even though those calls are not concurrent, snapshot.next() uses non-synchronized state and is not multi-thread-compatible // Also fail the resync early if the shard is shutting down - snapshot = indexShard.newChangesSnapshot("resync", startingSeqNo, Long.MAX_VALUE, false); + snapshot = indexShard.newChangesSnapshot("resync", startingSeqNo, Long.MAX_VALUE, false, true); final Translog.Snapshot originalSnapshot = snapshot; final Translog.Snapshot wrappedSnapshot = new Translog.Snapshot() { @Override diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java index 7899b11330a34..77596f50a8a5e 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java @@ -132,7 +132,7 @@ public class RecoverySourceHandler { private final CancellableThreads cancellableThreads = new CancellableThreads(); private final List resources = new CopyOnWriteArrayList<>(); private final ListenableFuture future = new ListenableFuture<>(); - private static final String PEER_RECOVERY_NAME = "peer-recovery"; + public static final String PEER_RECOVERY_NAME = "peer-recovery"; public RecoverySourceHandler( IndexShard shard, @@ -272,7 +272,7 @@ && isTargetSameHistory() logger.trace("performing file-based recovery followed by history replay starting at [{}]", startingSeqNo); try { - final int estimateNumOps = estimateNumberOfHistoryOperations(startingSeqNo); + final int estimateNumOps = countNumberOfHistoryOperations(startingSeqNo); final Releasable releaseStore = acquireStore(shard.store()); resources.add(releaseStore); sendFileStep.whenComplete(r -> IOUtils.close(wrappedSafeCommit, releaseStore), e -> { @@ -319,7 +319,7 @@ && isTargetSameHistory() sendFileStep.whenComplete(r -> { assert Transports.assertNotTransportThread(RecoverySourceHandler.this + 
"[prepareTargetForTranslog]"); // For a sequence based recovery, the target can keep its local translog - prepareTargetForTranslog(estimateNumberOfHistoryOperations(startingSeqNo), prepareEngineStep); + prepareTargetForTranslog(countNumberOfHistoryOperations(startingSeqNo), prepareEngineStep); }, onFailure); prepareEngineStep.whenComplete(prepareEngineTime -> { @@ -340,9 +340,15 @@ && isTargetSameHistory() final long endingSeqNo = shard.seqNoStats().getMaxSeqNo(); if (logger.isTraceEnabled()) { - logger.trace("snapshot translog for recovery; current size is [{}]", estimateNumberOfHistoryOperations(startingSeqNo)); + logger.trace("snapshot translog for recovery; current size is [{}]", countNumberOfHistoryOperations(startingSeqNo)); } - final Translog.Snapshot phase2Snapshot = shard.newChangesSnapshot(PEER_RECOVERY_NAME, startingSeqNo, Long.MAX_VALUE, false); + final Translog.Snapshot phase2Snapshot = shard.newChangesSnapshot( + PEER_RECOVERY_NAME, + startingSeqNo, + Long.MAX_VALUE, + false, + true + ); resources.add(phase2Snapshot); retentionLock.close(); @@ -403,10 +409,13 @@ private boolean isTargetSameHistory() { return targetHistoryUUID.equals(shard.getHistoryUUID()); } - private int estimateNumberOfHistoryOperations(long startingSeqNo) throws IOException { - try (Translog.Snapshot snapshot = shard.newChangesSnapshot(PEER_RECOVERY_NAME, startingSeqNo, Long.MAX_VALUE, false)) { - return snapshot.totalOperations(); - } + /** + * Counts the number of history operations from the starting sequence number + * @param startingSeqNo the starting sequence number to count; included + * @return number of history operations + */ + private int countNumberOfHistoryOperations(long startingSeqNo) throws IOException { + return shard.countNumberOfHistoryOperations(PEER_RECOVERY_NAME, startingSeqNo, Long.MAX_VALUE); } static void runUnderPrimaryPermit( diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java 
b/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java index 3ea7cad528e82..394b093059385 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java @@ -344,11 +344,11 @@ public void finalizeRecovery(final long globalCheckpoint, final long trimAboveSe private boolean hasUncommittedOperations() throws IOException { long localCheckpointOfCommit = Long.parseLong(indexShard.commitStats().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); - try ( - Translog.Snapshot snapshot = indexShard.newChangesSnapshot("peer-recovery", localCheckpointOfCommit + 1, Long.MAX_VALUE, false) - ) { - return snapshot.totalOperations() > 0; - } + return indexShard.countNumberOfHistoryOperations( + RecoverySourceHandler.PEER_RECOVERY_NAME, + localCheckpointOfCommit + 1, + Long.MAX_VALUE + ) > 0; } @Override diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index af9b913b11d56..33f09a3e67db8 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -6362,8 +6362,12 @@ public void onFailure(Exception e) { @Override protected void doRun() throws Exception { latch.await(); - Translog.Snapshot changes = engine.newChangesSnapshot("test", min, max, true); - changes.close(); + if (randomBoolean()) { + Translog.Snapshot changes = engine.newChangesSnapshot("test", min, max, true, randomBoolean()); + changes.close(); + } else { + engine.countNumberOfHistoryOperations("test", min, max); + } } }); snapshotThreads[i].start(); diff --git a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java index bd191e235369d..e3117e179e7fa 100644 --- 
a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java +++ b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java @@ -74,14 +74,14 @@ public void testBasics() throws Exception { long fromSeqNo = randomNonNegativeLong(); long toSeqNo = randomLongBetween(fromSeqNo, Long.MAX_VALUE); // Empty engine - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true, randomBoolean())) { IllegalStateException error = expectThrows(IllegalStateException.class, () -> drainAll(snapshot)); assertThat( error.getMessage(), containsString("Not all operations between from_seqno [" + fromSeqNo + "] and to_seqno [" + toSeqNo + "] found") ); } - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.size(0)); } int numOps = between(1, 100); @@ -114,7 +114,8 @@ public void testBasics() throws Exception { between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - false + false, + randomBoolean() ) ) { searcher = null; @@ -130,7 +131,8 @@ public void testBasics() throws Exception { between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - true + true, + randomBoolean() ) ) { searcher = null; @@ -152,7 +154,8 @@ public void testBasics() throws Exception { between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - false + false, + randomBoolean() ) ) { searcher = null; @@ -167,7 +170,8 @@ public void testBasics() throws Exception { between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - true + true, + randomBoolean() ) ) { searcher = null; @@ -187,7 +191,8 @@ public void testBasics() throws Exception { between(1, 
LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - true + true, + randomBoolean() ) ) { searcher = null; @@ -199,7 +204,7 @@ public void testBasics() throws Exception { // Get snapshot via engine will auto refresh fromSeqNo = randomLongBetween(0, numOps - 1); toSeqNo = randomLongBetween(fromSeqNo, numOps - 1); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, randomBoolean())) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, randomBoolean(), randomBoolean())) { assertThat(snapshot, SnapshotMatchers.containsSeqNoRange(fromSeqNo, toSeqNo)); } } @@ -230,8 +235,11 @@ public void testSkipNonRootOfNestedDocuments() throws Exception { long maxSeqNo = engine.getLocalCheckpointTracker().getMaxSeqNo(); engine.refresh("test"); Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); - try (Translog.Snapshot snapshot = new LuceneChangesSnapshot(searcher, between(1, 100), 0, maxSeqNo, false)) { - assertThat(snapshot.totalOperations(), equalTo(seqNoToTerm.size())); + final boolean accurateCount = randomBoolean(); + try (Translog.Snapshot snapshot = new LuceneChangesSnapshot(searcher, between(1, 100), 0, maxSeqNo, false, accurateCount)) { + if (accurateCount == true) { + assertThat(snapshot.totalOperations(), equalTo(seqNoToTerm.size())); + } Translog.Operation op; while ((op = snapshot.next()) != null) { assertThat(op.toString(), op.primaryTerm(), equalTo(seqNoToTerm.get(op.seqNo()))); @@ -306,7 +314,7 @@ void pullOperations(InternalEngine follower) throws IOException { long fromSeqNo = followerCheckpoint + 1; long batchSize = randomLongBetween(0, 100); long toSeqNo = Math.min(fromSeqNo + batchSize, leaderCheckpoint); - try (Translog.Snapshot snapshot = leader.newChangesSnapshot("test", fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = leader.newChangesSnapshot("test", fromSeqNo, toSeqNo, true, randomBoolean())) { 
translogHandler.run(follower, snapshot); } } @@ -352,7 +360,7 @@ private List drainAll(Translog.Snapshot snapshot) throws IOE public void testOverFlow() throws Exception { long fromSeqNo = randomLongBetween(0, 5); long toSeqNo = randomLongBetween(Long.MAX_VALUE - 5, Long.MAX_VALUE); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true, randomBoolean())) { IllegalStateException error = expectThrows(IllegalStateException.class, () -> drainAll(snapshot)); assertThat( error.getMessage(), diff --git a/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java index a88db8473cae0..d262b5abec0f3 100644 --- a/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java @@ -499,7 +499,7 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { assertThat(snapshot.totalOperations(), equalTo(0)); } } - try (Translog.Snapshot snapshot = shard.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = shard.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps)); } } @@ -517,7 +517,7 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(Collections.singletonList(noop2))); } } - try (Translog.Snapshot snapshot = shard.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = shard.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps)); } } @@ -619,7 
+619,7 @@ public void testSeqNoCollision() throws Exception { shards.promoteReplicaToPrimary(replica2).get(); logger.info("--> Recover replica3 from replica2"); recoverReplica(replica3, replica2, true); - try (Translog.Snapshot snapshot = replica3.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = replica3.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, true)) { assertThat(snapshot.totalOperations(), equalTo(initDocs + 1)); final List expectedOps = new ArrayList<>(initOperations); expectedOps.add(op2); diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java index c714bd0eb85a2..5e09e0f2253df 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java @@ -225,7 +225,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { IndexShard newReplica = shards.addReplicaWithExistingPath(orgPrimary.shardPath(), orgPrimary.routingEntry().currentNodeId()); shards.recoverReplica(newReplica); shards.assertAllEqual(3); - try (Translog.Snapshot snapshot = newReplica.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = newReplica.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.size(6)); } } diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java index fe810a87358d0..2bce5a7c81794 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java @@ -1312,7 +1312,7 @@ public static List getDocIds(Engine engine, boolean refresh */ public static List readAllOperationsInLucene(Engine engine) 
throws IOException { final List operations = new ArrayList<>(); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { Translog.Operation op; while ((op = snapshot.next()) != null) { operations.add(op); @@ -1326,7 +1326,7 @@ public static List readAllOperationsInLucene(Engine engine) */ public static List readAllOperationsBasedOnSource(Engine engine) throws IOException { final List operations = new ArrayList<>(); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { Translog.Operation op; while ((op = snapshot.next()) != null) { operations.add(op); From 006c832c5fe8f509aa6285de90f2c7583b3dff35 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 15 Mar 2022 15:48:13 -0500 Subject: [PATCH 11/12] [Upgrade] Lucene 9.0.0 release (#1109) This commit upgrades the core codebase from Lucene 8.10.1 to lucene 9.0.0. It includes all necessary refactoring of features and API changes when upgrading to a new major Lucene release. 
Signed-off-by: Nicholas Walter Knize Co-authored-by: Andriy Redko --- build.gradle | 5 +- .../src/main/resources/minimumRuntimeVersion | 2 +- buildSrc/version.properties | 4 +- .../common/settings/KeyStoreWrapperTests.java | 21 +- .../analysis/common/ClassicFilterFactory.java | 2 +- .../common/ClassicTokenizerFactory.java | 2 +- .../analysis/common/CommonAnalysisPlugin.java | 6 +- .../common/MinHashTokenFilterFactory.java | 16 +- .../common/UAX29URLEmailTokenizerFactory.java | 2 +- .../common/CommonAnalysisFactoryTests.java | 4 +- .../common/DisableGraphQueryTests.java | 8 +- .../lucene-expressions-8.10.1.jar.sha1 | 1 - .../lucene-expressions-9.0.0.jar.sha1 | 1 + .../expression/ExpressionScriptEngine.java | 7 +- .../plugin-metadata/plugin-security.policy | 1 + .../mapper/SearchAsYouTypeFieldMapper.java | 8 +- .../SearchAsYouTypeFieldMapperTests.java | 6 +- .../join/query/HasChildQueryBuilder.java | 6 + .../opensearch/percolator/PercolateQuery.java | 10 +- .../percolator/PercolatorFieldMapper.java | 4 +- .../PercolatorMatchedSlotSubFetchPhase.java | 5 +- .../opensearch/percolator/QueryAnalyzer.java | 4 +- .../percolator/CandidateQueryTests.java | 27 +- .../percolator/PercolateQueryTests.java | 4 +- .../PercolatorFieldMapperTests.java | 4 +- .../percolator/QueryAnalyzerTests.java | 16 +- plugins/analysis-icu/build.gradle | 4 +- .../analysis-icu/licenses/icu4j-62.1.jar.sha1 | 1 - .../analysis-icu/licenses/icu4j-68.2.jar.sha1 | 1 + .../lucene-analysis-icu-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-icu-8.10.1.jar.sha1 | 1 - .../index/analysis/ICUCollationKeyFilter.java | 2 +- plugins/analysis-kuromoji/build.gradle | 2 +- .../lucene-analysis-kuromoji-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-kuromoji-8.10.1.jar.sha1 | 1 - plugins/analysis-nori/build.gradle | 2 +- .../lucene-analysis-nori-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-nori-8.10.1.jar.sha1 | 1 - plugins/analysis-phonetic/build.gradle | 2 +- .../lucene-analysis-phonetic-9.0.0.jar.sha1 | 1 + 
.../lucene-analyzers-phonetic-8.10.1.jar.sha1 | 1 - plugins/analysis-smartcn/build.gradle | 2 +- .../lucene-analysis-smartcn-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-smartcn-8.10.1.jar.sha1 | 1 - plugins/analysis-stempel/build.gradle | 2 +- .../lucene-analysis-stempel-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-stempel-8.10.1.jar.sha1 | 1 - plugins/analysis-ukrainian/build.gradle | 2 +- .../lucene-analysis-morfologik-9.0.0.jar.sha1 | 1 + ...ucene-analyzers-morfologik-8.10.1.jar.sha1 | 1 - .../AnnotatedTextHighlighterTests.java | 1 - .../opensearch/index/store/SmbNIOFsTests.java | 27 +- server/build.gradle | 2 +- .../lucene-analysis-common-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-common-8.10.1.jar.sha1 | 1 - .../lucene-backward-codecs-8.10.1.jar.sha1 | 1 - .../lucene-backward-codecs-9.0.0.jar.sha1 | 1 + server/licenses/lucene-core-8.10.1.jar.sha1 | 1 - server/licenses/lucene-core-9.0.0.jar.sha1 | 1 + .../licenses/lucene-grouping-8.10.1.jar.sha1 | 1 - .../licenses/lucene-grouping-9.0.0.jar.sha1 | 1 + .../lucene-highlighter-8.10.1.jar.sha1 | 1 - .../lucene-highlighter-9.0.0.jar.sha1 | 1 + server/licenses/lucene-join-8.10.1.jar.sha1 | 1 - server/licenses/lucene-join-9.0.0.jar.sha1 | 1 + server/licenses/lucene-memory-8.10.1.jar.sha1 | 1 - server/licenses/lucene-memory-9.0.0.jar.sha1 | 1 + server/licenses/lucene-misc-8.10.1.jar.sha1 | 1 - server/licenses/lucene-misc-9.0.0.jar.sha1 | 1 + .../licenses/lucene-queries-8.10.1.jar.sha1 | 1 - server/licenses/lucene-queries-9.0.0.jar.sha1 | 1 + .../lucene-queryparser-8.10.1.jar.sha1 | 1 - .../lucene-queryparser-9.0.0.jar.sha1 | 1 + .../licenses/lucene-sandbox-8.10.1.jar.sha1 | 1 - server/licenses/lucene-sandbox-9.0.0.jar.sha1 | 1 + .../lucene-spatial-extras-8.10.1.jar.sha1 | 1 - .../lucene-spatial-extras-9.0.0.jar.sha1 | 1 + .../licenses/lucene-spatial3d-8.10.1.jar.sha1 | 1 - .../licenses/lucene-spatial3d-9.0.0.jar.sha1 | 1 + .../licenses/lucene-suggest-8.10.1.jar.sha1 | 1 - server/licenses/lucene-suggest-9.0.0.jar.sha1 | 1 + 
.../recovery/IndexPrimaryRelocationIT.java | 1 + .../org/opensearch/recovery/RelocationIT.java | 14 +- .../highlight/HighlighterSearchIT.java | 30 + .../search/query/QueryStringIT.java | 2 - .../validate/SimpleValidateQueryIT.java | 7 +- .../similarity/LegacyBM25Similarity.java | 117 +++ .../queries/BinaryDocValuesRangeQuery.java | 8 + .../lucene/queries/SpanMatchNoDocsQuery.java | 16 +- .../uhighlight/CustomUnifiedHighlighter.java | 26 +- .../vectorhighlight/CustomFieldQuery.java | 2 +- .../apache/lucene/util/CombinedBitSet.java | 5 + .../apache/lucene/util/SPIClassIterator.java | 186 +++++ .../apache/lucene/util/packed/XPacked64.java | 317 ++++++++ .../util/packed/XPacked64SingleBlock.java | 574 ++++++++++++++ .../lucene/util/packed/XPackedInts.java | 740 ++++++++++++++++++ .../src/main/java/org/opensearch/Version.java | 2 +- .../segments/IndicesSegmentResponse.java | 7 - .../action/search/SearchPhaseController.java | 7 +- .../action/search/TransportSearchHelper.java | 9 +- .../opensearch/common/bytes/BytesArray.java | 4 +- .../common/bytes/CompositeBytesReference.java | 5 +- .../org/opensearch/common/geo/GeoUtils.java | 4 +- .../org/opensearch/common/lucene/Lucene.java | 17 +- .../common/lucene/MinimumScoreCollector.java | 2 +- .../lucene/search/MoreLikeThisQuery.java | 6 + .../lucene/search/MultiPhrasePrefixQuery.java | 6 + .../common/lucene/search/Queries.java | 4 +- .../SpanBooleanQueryRewriteWithMaxClause.java | 8 +- .../search/function/FunctionScoreQuery.java | 7 - .../search/function/ScriptScoreQuery.java | 7 - .../common/settings/KeyStoreWrapper.java | 8 +- .../opensearch/common/util/CuckooFilter.java | 14 +- .../gateway/MetadataStateFormat.java | 5 +- .../index/cache/bitset/BitsetFilterCache.java | 5 +- .../opensearch/index/codec/CodecService.java | 8 +- .../PerFieldMappingPostingFormatCodec.java | 8 +- .../org/opensearch/index/engine/Engine.java | 4 - .../index/engine/InternalEngine.java | 4 +- .../index/engine/LuceneChangesSnapshot.java | 3 +- 
.../engine/PrunePostingsMergePolicy.java | 5 - .../RecoverySourcePruneMergePolicy.java | 16 +- .../org/opensearch/index/engine/Segment.java | 28 +- .../index/engine/TranslogLeafReader.java | 22 +- .../fielddata/IndexNumericFieldData.java | 2 + .../plain/PagedBytesIndexFieldData.java | 55 +- .../plain/SortedNumericIndexFieldData.java | 2 +- .../index/fieldvisitor/FieldsVisitor.java | 10 +- .../fieldvisitor/SingleFieldsVisitor.java | 5 +- .../opensearch/index/get/ShardGetService.java | 5 +- .../index/mapper/CompletionFieldMapper.java | 4 +- .../index/mapper/DateFieldMapper.java | 2 +- .../index/mapper/MappedFieldType.java | 4 +- .../index/mapper/NumberFieldMapper.java | 4 +- .../opensearch/index/mapper/RangeType.java | 6 +- .../index/mapper/TextFieldMapper.java | 17 +- .../index/query/AbstractQueryBuilder.java | 6 +- .../query/FieldMaskingSpanQueryBuilder.java | 7 +- .../index/query/InnerHitContextBuilder.java | 4 +- .../index/query/NestedQueryBuilder.java | 4 +- .../index/query/ScriptQueryBuilder.java | 6 + .../query/SpanContainingQueryBuilder.java | 6 +- .../index/query/SpanFirstQueryBuilder.java | 4 +- .../query/SpanMultiTermQueryBuilder.java | 2 +- .../index/query/SpanNearQueryBuilder.java | 4 +- .../index/query/SpanNotQueryBuilder.java | 4 +- .../index/query/SpanOrQueryBuilder.java | 4 +- .../index/query/SpanTermQueryBuilder.java | 4 +- .../index/query/SpanWithinQueryBuilder.java | 6 +- .../index/query/TermsSetQueryBuilder.java | 2 +- .../opensearch/index/search/MatchQuery.java | 15 +- .../index/search/QueryStringQueryParser.java | 14 +- .../search/SimpleQueryStringQueryParser.java | 9 +- .../index/shard/ShardSplittingQuery.java | 15 +- .../opensearch/index/shard/StoreRecovery.java | 2 +- .../index/similarity/SimilarityProviders.java | 6 +- .../index/similarity/SimilarityService.java | 2 +- .../org/opensearch/index/store/Store.java | 36 +- .../opensearch/index/translog/Checkpoint.java | 31 +- .../opensearch/index/translog/Translog.java | 1 - 
.../index/translog/TranslogReader.java | 1 - .../translog/TruncateTranslogAction.java | 2 - .../opensearch/indices/IndicesQueryCache.java | 6 - .../indices/analysis/AnalysisModule.java | 2 +- .../indices/analysis/PreBuiltAnalyzers.java | 22 +- .../recovery/PeerRecoveryTargetService.java | 23 +- .../lucene/queries/MinDocQuery.java | 8 +- .../queries/SearchAfterSortedDocQuery.java | 8 +- .../opensearch/plugins/PluginsService.java | 7 - .../search/DefaultSearchContext.java | 2 +- .../org/opensearch/search/SearchService.java | 6 +- .../aggregations/MultiBucketCollector.java | 2 +- .../bucket/composite/CompositeAggregator.java | 2 +- .../composite/PointsSortedDocsProducer.java | 7 +- .../bucket/nested/NestedAggregator.java | 4 +- .../nested/ReverseNestedAggregator.java | 2 +- ...DiversifiedBytesHashSamplerAggregator.java | 4 +- .../DiversifiedMapSamplerAggregator.java | 4 +- .../DiversifiedNumericSamplerAggregator.java | 4 +- .../DiversifiedOrdinalsSamplerAggregator.java | 4 +- .../bucket/sampler/SamplerAggregator.java | 2 +- .../aggregations/metrics/InternalTopHits.java | 10 +- .../aggregations/metrics/MaxAggregator.java | 4 +- .../metrics/TopHitsAggregatorFactory.java | 6 +- .../opensearch/search/fetch/FetchPhase.java | 19 +- .../fetch/subphase/FetchDocValuesContext.java | 12 +- .../fetch/subphase/InnerHitsContext.java | 4 +- .../highlight/UnifiedHighlighter.java | 10 - .../search/internal/ContextIndexSearcher.java | 8 +- .../search/profile/query/ProfileWeight.java | 6 +- .../opensearch/search/query/QueryPhase.java | 33 +- .../search/query/TopDocsCollectorContext.java | 2 +- .../opensearch/search/slice/SliceQuery.java | 7 + .../opensearch/search/sort/SortBuilder.java | 2 +- .../org/opensearch/bootstrap/security.policy | 2 + .../org/opensearch/LegacyESVersionTests.java | 6 +- .../segments/IndicesSegmentsRequestTests.java | 14 - .../allocation/IndexShardHotSpotTests.java | 1 + .../opensearch/common/lucene/LuceneTests.java | 11 +- 
.../common/lucene/search/QueriesTests.java | 6 +- .../search/function/MinScoreScorerTests.java | 5 - .../gateway/MetadataStateFormatTests.java | 2 +- .../opensearch/index/codec/CodecTests.java | 41 +- .../engine/CompletionStatsCacheTests.java | 8 +- .../index/engine/InternalEngineTests.java | 4 - .../index/engine/LiveVersionMapTests.java | 4 +- .../opensearch/index/engine/SegmentTests.java | 2 +- .../index/engine/VersionValueTests.java | 4 +- .../index/mapper/DateFieldTypeTests.java | 2 +- .../index/mapper/NumberFieldTypeTests.java | 5 +- .../mapper/StoredNumericValuesTests.java | 2 +- .../index/mapper/TextFieldMapperTests.java | 8 +- .../index/query/DisMaxQueryBuilderTests.java | 31 +- .../FieldMaskingSpanQueryBuilderTests.java | 43 +- .../MatchBoolPrefixQueryBuilderTests.java | 4 +- .../index/query/MatchQueryBuilderTests.java | 8 +- .../query/MultiMatchQueryBuilderTests.java | 58 +- .../query/QueryStringQueryBuilderTests.java | 77 +- .../query/SimpleQueryStringBuilderTests.java | 35 +- .../SpanContainingQueryBuilderTests.java | 2 +- .../query/SpanFirstQueryBuilderTests.java | 2 +- .../index/query/SpanGapQueryBuilderTests.java | 13 +- .../query/SpanMultiTermQueryBuilderTests.java | 13 +- .../query/SpanNearQueryBuilderTests.java | 10 +- .../index/query/SpanNotQueryBuilderTests.java | 2 +- .../index/query/SpanOrQueryBuilderTests.java | 4 +- .../query/SpanTermQueryBuilderTests.java | 2 +- .../query/SpanWithinQueryBuilderTests.java | 2 +- .../query/TermsSetQueryBuilderTests.java | 2 +- .../query/plugin/DummyQueryParserPlugin.java | 6 + .../index/search/MultiMatchQueryTests.java | 9 +- .../search/nested/NestedSortingTests.java | 3 +- .../similarity/ScriptedSimilarityTests.java | 3 +- .../similarity/SimilarityServiceTests.java | 2 +- .../index/similarity/SimilarityTests.java | 2 +- .../opensearch/index/store/StoreTests.java | 11 +- .../index/translog/TranslogTests.java | 26 +- .../indices/IndicesQueryCacheTests.java | 13 +- .../indices/analysis/AnalysisModuleTests.java 
| 1 - .../indices/recovery/RecoveryStatusTests.java | 5 +- .../DeDuplicatingTokenFilterTests.java | 5 +- .../TruncateTokenFilterTests.java | 3 +- .../CollapsingTopDocsCollectorTests.java | 6 +- .../index/ShuffleForcedMergePolicyTests.java | 11 +- .../similarity/LegacyBM25SimilarityTests.java | 121 +++ ...ndomBinaryDocValuesRangeQueryTestCase.java | 2 +- .../BinaryDocValuesRangeQueryTests.java | 2 +- .../lucene/queries/BlendedTermQueryTests.java | 9 +- ...eRandomBinaryDocValuesRangeQueryTests.java | 2 +- ...tRandomBinaryDocValuesRangeQueryTests.java | 2 +- ...sRandomBinaryDocValuesRangeQueryTests.java | 21 +- ...rRandomBinaryDocValuesRangeQueryTests.java | 2 +- ...gRandomBinaryDocValuesRangeQueryTests.java | 2 +- .../lucene/queries/MinDocQueryTests.java | 2 +- .../SearchAfterSortedDocQueryTests.java | 2 +- .../queries/SpanMatchNoDocsQueryTests.java | 11 +- .../BoundedBreakIteratorScannerTests.java | 3 +- .../CustomPassageFormatterTests.java | 5 +- .../CustomUnifiedHighlighterTests.java | 90 ++- .../lucene/util/CombinedBitSetTests.java | 6 +- .../plugins/PluginsServiceTests.java | 4 + .../composite/CompositeAggregatorTests.java | 18 +- .../bucket/nested/NestedAggregatorTests.java | 6 +- .../metrics/MaxAggregatorTests.java | 3 +- .../internal/ContextIndexSearcherTests.java | 11 +- .../search/lookup/LeafFieldsLookupTests.java | 3 + .../profile/query/QueryProfilerTests.java | 15 +- .../search/query/QueryPhaseTests.java | 12 +- .../search/sort/FieldSortBuilderTests.java | 2 +- .../analysis/AnalysisFactoryTestCase.java | 17 +- .../aggregations/AggregatorTestCase.java | 2 +- .../test/AbstractQueryTestCase.java | 12 +- .../org/opensearch/test/CorruptionUtils.java | 2 +- .../test/hamcrest/OpenSearchAssertions.java | 9 - 274 files changed, 3052 insertions(+), 980 deletions(-) delete mode 100644 modules/lang-expression/licenses/lucene-expressions-8.10.1.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 delete mode 100644 
plugins/analysis-icu/licenses/icu4j-62.1.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/icu4j-68.2.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-8.10.1.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.10.1.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-8.10.1.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.10.1.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.10.1.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.10.1.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-analysis-common-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-analyzers-common-8.10.1.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-core-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-core-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-grouping-9.0.0.jar.sha1 delete mode 100644 
server/licenses/lucene-highlighter-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-join-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-join-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-memory-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-memory-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-misc-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-misc-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-queries-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-queries-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-suggest-9.0.0.jar.sha1 create mode 100644 server/src/main/java/org/apache/lucene/misc/search/similarity/LegacyBM25Similarity.java create mode 100644 server/src/main/java/org/apache/lucene/util/SPIClassIterator.java create mode 100644 server/src/main/java/org/apache/lucene/util/packed/XPacked64.java create mode 100644 server/src/main/java/org/apache/lucene/util/packed/XPacked64SingleBlock.java create mode 100644 server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java rename server/src/main/java/org/{apache => opensearch}/lucene/queries/MinDocQuery.java (96%) rename server/src/main/java/org/{apache => opensearch}/lucene/queries/SearchAfterSortedDocQuery.java (97%) rename 
server/src/test/java/org/{apache => opensearch}/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java (95%) rename server/src/test/java/org/{apache => opensearch}/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java (95%) rename server/src/test/java/org/{apache => opensearch}/lucene/grouping/CollapsingTopDocsCollectorTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/index/ShuffleForcedMergePolicyTests.java (89%) create mode 100644 server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java rename server/src/test/java/org/{apache => opensearch}/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/BinaryDocValuesRangeQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/BlendedTermQueryTests.java (98%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java (81%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/MinDocQueryTests.java (98%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/SearchAfterSortedDocQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/SpanMatchNoDocsQueryTests.java (93%) rename server/src/test/java/org/{apache => opensearch}/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java (98%) rename 
server/src/test/java/org/{apache => opensearch}/lucene/search/uhighlight/CustomPassageFormatterTests.java (95%) rename server/src/test/java/org/{apache => opensearch}/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java (82%) rename server/src/test/java/org/{apache => opensearch}/lucene/util/CombinedBitSetTests.java (95%) diff --git a/build.gradle b/build.gradle index 374bfb3ccfae3..be5766f327e0d 100644 --- a/build.gradle +++ b/build.gradle @@ -230,7 +230,10 @@ tasks.register("branchConsistency") { allprojects { // configure compiler options tasks.withType(JavaCompile).configureEach { JavaCompile compile -> - compile.options.compilerArgs << '-Werror' + // See please https://bugs.openjdk.java.net/browse/JDK-8209058 + if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_11) { + compile.options.compilerArgs << '-Werror' + } compile.options.compilerArgs << '-Xlint:auxiliaryclass' compile.options.compilerArgs << '-Xlint:cast' compile.options.compilerArgs << '-Xlint:classfile' diff --git a/buildSrc/src/main/resources/minimumRuntimeVersion b/buildSrc/src/main/resources/minimumRuntimeVersion index 9d607966b721a..b4de394767536 100644 --- a/buildSrc/src/main/resources/minimumRuntimeVersion +++ b/buildSrc/src/main/resources/minimumRuntimeVersion @@ -1 +1 @@ -11 \ No newline at end of file +11 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index bfc939394bdaa..7682a982e8186 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ opensearch = 2.0.0 -lucene = 8.10.1 +lucene = 9.0.0 bundled_jdk_vendor = adoptium bundled_jdk = 17.0.2+8 @@ -11,7 +11,7 @@ spatial4j = 0.7 jts = 1.15.0 jackson = 2.12.6 snakeyaml = 1.26 -icu4j = 62.1 +icu4j = 68.2 supercsv = 2.4.0 log4j = 2.17.1 slf4j = 1.6.2 diff --git a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java index 
52130d6e270df..2688e7637c9ba 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java @@ -32,7 +32,9 @@ package org.opensearch.common.settings; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.NIOFSDirectory; @@ -328,13 +330,14 @@ private void possiblyAlterEncryptedBytes( byte[] encryptedBytes, int truncEncryptedDataLength ) throws Exception { - indexOutput.writeInt(4 + salt.length + 4 + iv.length + 4 + encryptedBytes.length); - indexOutput.writeInt(salt.length); - indexOutput.writeBytes(salt, salt.length); - indexOutput.writeInt(iv.length); - indexOutput.writeBytes(iv, iv.length); - indexOutput.writeInt(encryptedBytes.length - truncEncryptedDataLength); - indexOutput.writeBytes(encryptedBytes, encryptedBytes.length); + DataOutput io = EndiannessReverserUtil.wrapDataOutput(indexOutput); + io.writeInt(4 + salt.length + 4 + iv.length + 4 + encryptedBytes.length); + io.writeInt(salt.length); + io.writeBytes(salt, salt.length); + io.writeInt(iv.length); + io.writeBytes(iv, iv.length); + io.writeInt(encryptedBytes.length - truncEncryptedDataLength); + io.writeBytes(encryptedBytes, encryptedBytes.length); } public void testUpgradeAddsSeed() throws Exception { @@ -363,7 +366,7 @@ public void testBackcompatV1() throws Exception { assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm()); Path configDir = env.configFile(); NIOFSDirectory directory = new NIOFSDirectory(configDir); - try (IndexOutput output = directory.createOutput("opensearch.keystore", IOContext.DEFAULT)) { + try (IndexOutput output = EndiannessReverserUtil.createOutput(directory, 
"opensearch.keystore", IOContext.DEFAULT)) { CodecUtil.writeHeader(output, "opensearch.keystore", 1); output.writeByte((byte) 0); // hasPassword = false output.writeString("PKCS12"); @@ -396,7 +399,7 @@ public void testBackcompatV2() throws Exception { NIOFSDirectory directory = new NIOFSDirectory(configDir); byte[] fileBytes = new byte[20]; random().nextBytes(fileBytes); - try (IndexOutput output = directory.createOutput("opensearch.keystore", IOContext.DEFAULT)) { + try (IndexOutput output = EndiannessReverserUtil.createOutput(directory, "opensearch.keystore", IOContext.DEFAULT)) { CodecUtil.writeHeader(output, "opensearch.keystore", 2); output.writeByte((byte) 0); // hasPassword = false diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicFilterFactory.java index a50ff8ac5bdd1..92e28b2ad9ee7 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicFilterFactory.java @@ -32,7 +32,7 @@ package org.opensearch.analysis.common; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.standard.ClassicFilter; +import org.apache.lucene.analysis.classic.ClassicFilter; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicTokenizerFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicTokenizerFactory.java index 978ba807336a5..9528e0991fe82 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicTokenizerFactory.java @@ -33,7 +33,7 @@ package 
org.opensearch.analysis.common; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.standard.ClassicTokenizer; +import org.apache.lucene.analysis.classic.ClassicTokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java index 47a144311c0a7..c69917ed52be8 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java @@ -51,6 +51,8 @@ import org.apache.lucene.analysis.cjk.CJKWidthFilter; import org.apache.lucene.analysis.ckb.SoraniAnalyzer; import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter; +import org.apache.lucene.analysis.classic.ClassicFilter; +import org.apache.lucene.analysis.classic.ClassicTokenizer; import org.apache.lucene.analysis.commongrams.CommonGramsFilter; import org.apache.lucene.analysis.core.DecimalDigitFilter; import org.apache.lucene.analysis.core.KeywordTokenizer; @@ -64,6 +66,7 @@ import org.apache.lucene.analysis.de.GermanNormalizationFilter; import org.apache.lucene.analysis.de.GermanStemFilter; import org.apache.lucene.analysis.el.GreekAnalyzer; +import org.apache.lucene.analysis.email.UAX29URLEmailTokenizer; import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.en.KStemFilter; import org.apache.lucene.analysis.en.PorterStemFilter; @@ -113,10 +116,7 @@ import org.apache.lucene.analysis.ru.RussianAnalyzer; import org.apache.lucene.analysis.shingle.ShingleFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; -import org.apache.lucene.analysis.standard.ClassicFilter; -import 
org.apache.lucene.analysis.standard.ClassicTokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; import org.apache.lucene.analysis.sv.SwedishAnalyzer; import org.apache.lucene.analysis.th.ThaiAnalyzer; import org.apache.lucene.analysis.th.ThaiTokenizer; diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java index c9786ac89c005..e76354ae3a765 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java @@ -62,10 +62,18 @@ public TokenStream create(TokenStream tokenStream) { private Map convertSettings(Settings settings) { Map settingMap = new HashMap<>(); - settingMap.put("hashCount", settings.get("hash_count")); - settingMap.put("bucketCount", settings.get("bucket_count")); - settingMap.put("hashSetSize", settings.get("hash_set_size")); - settingMap.put("withRotation", settings.get("with_rotation")); + if (settings.hasValue("hash_count")) { + settingMap.put("hashCount", settings.get("hash_count")); + } + if (settings.hasValue("bucketCount")) { + settingMap.put("bucketCount", settings.get("bucket_count")); + } + if (settings.hasValue("hashSetSize")) { + settingMap.put("hashSetSize", settings.get("hash_set_size")); + } + if (settings.hasValue("with_rotation")) { + settingMap.put("withRotation", settings.get("with_rotation")); + } return settingMap; } } diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/UAX29URLEmailTokenizerFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/UAX29URLEmailTokenizerFactory.java index 8fcfb2c599ae0..8d6e0ec0815b4 100644 --- 
a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/UAX29URLEmailTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/UAX29URLEmailTokenizerFactory.java @@ -34,7 +34,7 @@ import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; +import org.apache.lucene.analysis.email.UAX29URLEmailTokenizer; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java index bced9c334d9f6..4cf0d1de28717 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java @@ -110,6 +110,7 @@ protected Map> getTokenFilters() { filters.put("latvianstem", StemmerTokenFilterFactory.class); filters.put("norwegianlightstem", StemmerTokenFilterFactory.class); filters.put("norwegianminimalstem", StemmerTokenFilterFactory.class); + filters.put("norwegiannormalization", Void.class); filters.put("portuguesestem", StemmerTokenFilterFactory.class); filters.put("portugueselightstem", StemmerTokenFilterFactory.class); filters.put("portugueseminimalstem", StemmerTokenFilterFactory.class); @@ -117,6 +118,7 @@ protected Map> getTokenFilters() { filters.put("soranistem", StemmerTokenFilterFactory.class); filters.put("spanishlightstem", StemmerTokenFilterFactory.class); filters.put("swedishlightstem", StemmerTokenFilterFactory.class); + filters.put("swedishminimalstem", Void.class); filters.put("stemmeroverride", StemmerOverrideTokenFilterFactory.class); filters.put("kstem", 
KStemTokenFilterFactory.class); filters.put("synonym", SynonymTokenFilterFactory.class); @@ -242,7 +244,7 @@ protected Map> getPreConfiguredTokenizers() { tokenizers.put("keyword", null); tokenizers.put("lowercase", Void.class); tokenizers.put("classic", null); - tokenizers.put("uax_url_email", org.apache.lucene.analysis.standard.UAX29URLEmailTokenizerFactory.class); + tokenizers.put("uax_url_email", org.apache.lucene.analysis.email.UAX29URLEmailTokenizerFactory.class); tokenizers.put("path_hierarchy", null); tokenizers.put("letter", null); tokenizers.put("whitespace", null); diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java index 0b0beea41751c..35915af8f263d 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java @@ -107,11 +107,15 @@ public void setup() { // parsed queries for "text_shingle_unigram:(foo bar baz)" with query parsers // that ignores position length attribute expectedQueryWithUnigram = new BooleanQuery.Builder().add( - new SynonymQuery(new Term("text_shingle_unigram", "foo"), new Term("text_shingle_unigram", "foo bar")), + new SynonymQuery.Builder("text_shingle_unigram").addTerm(new Term("text_shingle_unigram", "foo")) + .addTerm(new Term("text_shingle_unigram", "foo bar")) + .build(), BooleanClause.Occur.SHOULD ) .add( - new SynonymQuery(new Term("text_shingle_unigram", "bar"), new Term("text_shingle_unigram", "bar baz")), + new SynonymQuery.Builder("text_shingle_unigram").addTerm(new Term("text_shingle_unigram", "bar")) + .addTerm(new Term("text_shingle_unigram", "bar baz")) + .build(), BooleanClause.Occur.SHOULD ) .add(new TermQuery(new Term("text_shingle_unigram", "baz")), BooleanClause.Occur.SHOULD) diff --git 
a/modules/lang-expression/licenses/lucene-expressions-8.10.1.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.10.1.jar.sha1 deleted file mode 100644 index f327cbcb6f8e6..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -24932a4be7064a99126d80776718845b356abae0 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..21edcc44b664e --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 @@ -0,0 +1 @@ +0a3d818d6f6fb113831ed34553b24763fbda1e84 \ No newline at end of file diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java index a6fcd7a1978e4..1c3dc69359952 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java @@ -37,7 +37,6 @@ import org.apache.lucene.expressions.js.JavascriptCompiler; import org.apache.lucene.expressions.js.VariableContext; import org.apache.lucene.search.DoubleValuesSource; -import org.apache.lucene.search.SortField; import org.opensearch.SpecialPermission; import org.opensearch.common.Nullable; import org.opensearch.index.fielddata.IndexFieldData; @@ -263,7 +262,7 @@ private static NumberSortScript.LeafFactory newSortScript(Expression expr, Searc for (String variable : expr.variables) { try { if (variable.equals("_score")) { - bindings.add(new SortField("_score", SortField.Type.SCORE)); + bindings.add("_score", DoubleValuesSource.SCORES); needsScores = true; } else if (vars != null && vars.containsKey(variable)) { bindFromParams(vars, bindings, variable); @@ 
-320,7 +319,7 @@ private static AggregationScript.LeafFactory newAggregationScript( for (String variable : expr.variables) { try { if (variable.equals("_score")) { - bindings.add(new SortField("_score", SortField.Type.SCORE)); + bindings.add("_score", DoubleValuesSource.SCORES); needsScores = true; } else if (variable.equals("_value")) { specialValue = new ReplaceableConstDoubleValueSource(); @@ -393,7 +392,7 @@ private static ScoreScript.LeafFactory newScoreScript(Expression expr, SearchLoo for (String variable : expr.variables) { try { if (variable.equals("_score")) { - bindings.add(new SortField("_score", SortField.Type.SCORE)); + bindings.add("_score", DoubleValuesSource.SCORES); needsScores = true; } else if (variable.equals("_value")) { specialValue = new ReplaceableConstDoubleValueSource(); diff --git a/modules/lang-expression/src/main/plugin-metadata/plugin-security.policy b/modules/lang-expression/src/main/plugin-metadata/plugin-security.policy index d3fa7589f092a..0c61624ca4fd7 100644 --- a/modules/lang-expression/src/main/plugin-metadata/plugin-security.policy +++ b/modules/lang-expression/src/main/plugin-metadata/plugin-security.policy @@ -42,4 +42,5 @@ grant { permission org.opensearch.script.ClassPermission "java.lang.Math"; permission org.opensearch.script.ClassPermission "org.apache.lucene.util.MathUtil"; permission org.opensearch.script.ClassPermission "org.apache.lucene.util.SloppyMath"; + permission org.opensearch.script.ClassPermission "org.apache.lucene.expressions.js.ExpressionMath"; }; diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java index 7bf102584a379..7394993448bbf 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java @@ -44,6 
+44,10 @@ import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.AutomatonQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -52,10 +56,6 @@ import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Operations; diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java index 037b486df956d..786791314692d 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java @@ -38,6 +38,9 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -47,9 +50,6 
@@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.common.Strings; import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; import org.opensearch.common.xcontent.XContentBuilder; diff --git a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java index 2e7411743b15d..be957146da21d 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java @@ -37,6 +37,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.join.JoinUtil; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.similarities.Similarity; @@ -409,6 +410,11 @@ public static final class LateParsingQuery extends Query { this.similarity = similarity; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public Query rewrite(IndexReader reader) throws IOException { Query rewritten = super.rewrite(reader); diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java index 14e7973ec0c2d..0aa8318e7c191 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java @@ -34,11 +34,11 @@ import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; @@ -56,7 +56,6 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -import java.util.Set; final class PercolateQuery extends Query implements Accountable { @@ -112,8 +111,6 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo final Weight verifiedMatchesWeight = verifiedMatchesQuery.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, boost); final Weight candidateMatchesWeight = candidateMatchesQuery.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, boost); return new Weight(this) { - @Override - public void extractTerms(Set set) {} @Override public Explanation explain(LeafReaderContext leafReaderContext, int docId) throws IOException { @@ -245,6 +242,11 @@ Query getVerifiedMatchesQuery() { return verifiedMatchesQuery; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + // Comparing identity here to avoid being cached // Note that in theory if the same instance gets used multiple times it could still get cached, // however since we create a new query instance each time we this query this shouldn't happen and thus diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java index a8b0395dd84e0..fec38207582e7 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java @@ -43,9 +43,9 
@@ import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.CoveringQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LongValuesSource; import org.apache.lucene.search.MatchNoDocsQuery; @@ -279,7 +279,7 @@ Query percolateQuery( } Query filter = null; if (excludeNestedDocuments) { - filter = Queries.newNonNestedFilter(indexVersion); + filter = Queries.newNonNestedFilter(); } return new PercolateQuery(name, queryStore, documents, candidateQuery, searcher, filter, verifiedMatchesQuery); } diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index 244e3ef1beacc..a157a20f5f2c4 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -44,7 +44,6 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSetIterator; -import org.opensearch.Version; import org.opensearch.common.document.DocumentField; import org.opensearch.common.lucene.search.Queries; import org.opensearch.search.fetch.FetchContext; @@ -127,7 +126,7 @@ static class PercolateContext { this.percolateQuery = pq; this.singlePercolateQuery = singlePercolateQuery; IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher(); - Query nonNestedFilter = percolatorIndexSearcher.rewrite(Queries.newNonNestedFilter(Version.CURRENT)); + Query nonNestedFilter = percolatorIndexSearcher.rewrite(Queries.newNonNestedFilter()); Weight weight = 
percolatorIndexSearcher.createWeight(nonNestedFilter, ScoreMode.COMPLETE_NO_SCORES, 1f); Scorer s = weight.scorer(percolatorIndexSearcher.getIndexReader().leaves().get(0)); int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc(); @@ -148,7 +147,7 @@ Query filterNestedDocs(Query in) { if (rootDocsBySlot != null) { // Ensures that we filter out nested documents return new BooleanQuery.Builder().add(in, BooleanClause.Occur.MUST) - .add(Queries.newNonNestedFilter(Version.CURRENT), BooleanClause.Occur.FILTER) + .add(Queries.newNonNestedFilter(), BooleanClause.Occur.FILTER) .build(); } return in; diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java index 4a8ab8ba7d437..3a1b6734dd444 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java @@ -35,6 +35,8 @@ import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -48,8 +50,6 @@ import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.automaton.ByteRunAutomaton; diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java index 
4058548f052f8..e59aa227e3dc7 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java @@ -37,7 +37,6 @@ import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; @@ -60,10 +59,15 @@ import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.queries.CommonTermsQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanNotQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; +import org.apache.lucene.sandbox.document.HalfFloatPoint; +import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.CoveringQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; @@ -74,6 +78,7 @@ import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Sort; @@ -83,10 +88,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.Weight; import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanNotQuery; -import 
org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; @@ -123,7 +124,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -1279,6 +1279,11 @@ public Query rewrite(IndexReader reader) throws IOException { return new TermQuery(term); } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public String toString(String field) { return "custom{" + field + "}"; @@ -1310,9 +1315,6 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo final IndexSearcher percolatorIndexSearcher = memoryIndex.createSearcher(); return new Weight(this) { - @Override - public void extractTerms(Set terms) {} - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { Scorer scorer = scorer(context); @@ -1386,6 +1388,11 @@ public boolean isCacheable(LeafReaderContext ctx) { }; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public String toString(String field) { return "control{" + field + "}"; diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java index a4a6f9b6de254..c5049e21acc0c 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java @@ -42,6 +42,8 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.Term; import org.apache.lucene.index.memory.MemoryIndex; +import org.apache.lucene.queries.spans.SpanNearQuery; +import 
org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -53,8 +55,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; import org.opensearch.common.bytes.BytesArray; import org.opensearch.test.OpenSearchTestCase; diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java index 2c0aa593317b4..ca6f3a78b27d7 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java @@ -35,7 +35,6 @@ import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; @@ -43,9 +42,10 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.index.memory.MemoryIndex; +import org.apache.lucene.sandbox.document.HalfFloatPoint; +import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.CoveringQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; diff --git 
a/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java index 6a7198d55faee..509f483bcd253 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java @@ -33,7 +33,6 @@ import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LatLonPoint; @@ -45,6 +44,12 @@ import org.apache.lucene.queries.intervals.IntervalQuery; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; +import org.apache.lucene.queries.spans.SpanFirstQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanNotQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; +import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -63,11 +68,6 @@ import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.join.QueryBitSetProducer; import org.apache.lucene.search.join.ScoreMode; -import org.apache.lucene.search.spans.SpanFirstQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanNotQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.opensearch.Version; import org.opensearch.common.lucene.search.function.CombineFunction; @@ -824,13 +824,13 @@ public void 
testExtractQueryMetadata_disjunctionMaxQuery() { } public void testSynonymQuery() { - SynonymQuery query = new SynonymQuery(); + SynonymQuery query = new SynonymQuery.Builder("field").build(); Result result = analyze(query, Version.CURRENT); assertThat(result.verified, is(true)); assertThat(result.minimumShouldMatch, equalTo(0)); assertThat(result.extractions.isEmpty(), is(true)); - query = new SynonymQuery(new Term("_field", "_value1"), new Term("_field", "_value2")); + query = new SynonymQuery.Builder("_field").addTerm(new Term("_field", "_value1")).addTerm(new Term("_field", "_value2")).build(); result = analyze(query, Version.CURRENT); assertThat(result.verified, is(true)); assertThat(result.minimumShouldMatch, equalTo(1)); diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index 8bc8c2c764e29..e5c084559f0a6 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -28,8 +28,6 @@ * under the License. */ -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis - apply plugin: 'opensearch.yaml-rest-test' apply plugin: 'opensearch.internal-cluster-test' @@ -46,7 +44,7 @@ forbiddenApisMain { } dependencies { - api "org.apache.lucene:lucene-analyzers-icu:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-icu:${versions.lucene}" api "com.ibm.icu:icu4j:${versions.icu4j}" } diff --git a/plugins/analysis-icu/licenses/icu4j-62.1.jar.sha1 b/plugins/analysis-icu/licenses/icu4j-62.1.jar.sha1 deleted file mode 100644 index c24c69cf4b90f..0000000000000 --- a/plugins/analysis-icu/licenses/icu4j-62.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7a4d00d5ec5febd252a6182e8b6e87a0a9821f81 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/icu4j-68.2.jar.sha1 b/plugins/analysis-icu/licenses/icu4j-68.2.jar.sha1 new file mode 100644 index 0000000000000..fcb3d79075099 --- /dev/null +++ b/plugins/analysis-icu/licenses/icu4j-68.2.jar.sha1 @@ -0,0 +1 @@ +76893e6000401ace133a65262254be0ebe556d46 \ No 
newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..a0df1a4b7cb2e --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 @@ -0,0 +1 @@ +a23a2c1c9baad61b6fb5380f072e41534c275875 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.10.1.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.10.1.jar.sha1 deleted file mode 100644 index cd11905d4531e..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a1eec256a25340ba5d432d2800f759db83eb5145 \ No newline at end of file diff --git a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/ICUCollationKeyFilter.java b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/ICUCollationKeyFilter.java index 3d4affb280b48..d7e097ce79798 100644 --- a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/ICUCollationKeyFilter.java +++ b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/ICUCollationKeyFilter.java @@ -35,7 +35,7 @@ import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.collation.ICUCollationDocValuesField; +import org.apache.lucene.analysis.icu.ICUCollationDocValuesField; import java.io.IOException; diff --git a/plugins/analysis-kuromoji/build.gradle b/plugins/analysis-kuromoji/build.gradle index 29ed05a9661dd..60738fb28b6d5 100644 --- a/plugins/analysis-kuromoji/build.gradle +++ b/plugins/analysis-kuromoji/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-kuromoji:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-kuromoji:${versions.lucene}" } restResources { diff --git 
a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..7eb72638fd6d2 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 @@ -0,0 +1 @@ +55f00abe01e51181d687c6bbceca8544f319b97d \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.10.1.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.10.1.jar.sha1 deleted file mode 100644 index 0cee3fd1fe9cf..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d9ff6329a9755bbdb7343452bf246e61ae9279d8 \ No newline at end of file diff --git a/plugins/analysis-nori/build.gradle b/plugins/analysis-nori/build.gradle index 1f0b73f334f88..3def7f9c6c60f 100644 --- a/plugins/analysis-nori/build.gradle +++ b/plugins/analysis-nori/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-nori:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-nori:${versions.lucene}" } restResources { diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..4d787ad04791f --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 @@ -0,0 +1 @@ +c5258e674ad9c189338b026710869c2955d8e11d \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.10.1.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.10.1.jar.sha1 deleted file mode 100644 index ec8d7c98c2d6f..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6e78aef6d1b709ed3e27dbc949255e078da08d41 \ No newline at end of file diff --git 
a/plugins/analysis-phonetic/build.gradle b/plugins/analysis-phonetic/build.gradle index 5d9e52307f389..ffa0466d43170 100644 --- a/plugins/analysis-phonetic/build.gradle +++ b/plugins/analysis-phonetic/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-phonetic:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-phonetic:${versions.lucene}" api "commons-codec:commons-codec:${versions.commonscodec}" } diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..8d915a28087e6 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 @@ -0,0 +1 @@ +437960fac10a9f8327fbd87be4e408eb140988b3 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.10.1.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.10.1.jar.sha1 deleted file mode 100644 index 14edf0533a00d..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c186bf6dd0c2fa6612ba9b0d785ff2d388d32a23 \ No newline at end of file diff --git a/plugins/analysis-smartcn/build.gradle b/plugins/analysis-smartcn/build.gradle index 4a389d60cac19..92f2774854715 100644 --- a/plugins/analysis-smartcn/build.gradle +++ b/plugins/analysis-smartcn/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-smartcn:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-smartcn:${versions.lucene}" } restResources { diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..d57bf6b3ab80d --- /dev/null +++ 
b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 @@ -0,0 +1 @@ +fe96c0b4609be5f7450773c2d7f099c51f4b1f7a \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.10.1.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.10.1.jar.sha1 deleted file mode 100644 index 5fc06ea596458..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ebda1884c24bb14ee451b98e7565c86966f8863d \ No newline at end of file diff --git a/plugins/analysis-stempel/build.gradle b/plugins/analysis-stempel/build.gradle index b03d33adc4207..d713f80172c58 100644 --- a/plugins/analysis-stempel/build.gradle +++ b/plugins/analysis-stempel/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-stempel:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-stempel:${versions.lucene}" } restResources { diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..ade92c37c5865 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 @@ -0,0 +1 @@ +b92e86dd451d225e68ee4abac5b00bf883b6ea00 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.10.1.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.10.1.jar.sha1 deleted file mode 100644 index bf2d58255a77e..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2a4bd86c96374cdc5acaf7c0efd5127f2fd3a519 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/build.gradle b/plugins/analysis-ukrainian/build.gradle index 9e4bb9c647859..386452fcf8aeb 100644 --- a/plugins/analysis-ukrainian/build.gradle +++ 
b/plugins/analysis-ukrainian/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-morfologik:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-morfologik:${versions.lucene}" api "org.carrot2:morfologik-stemming:2.1.8" api "org.carrot2:morfologik-fsa:2.1.8" api "ua.net.nlp:morfologik-ukrainian-search:4.9.1" diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..433ce1f0552c8 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 @@ -0,0 +1 @@ +048fddf601c6de7dd296f6da3f394544618f7cea \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.10.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.10.1.jar.sha1 deleted file mode 100644 index 6076c699bb7bf..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -09de2e3fa72355228b2723f958dcb0ec1bc3f31a \ No newline at end of file diff --git a/plugins/mapper-annotated-text/src/test/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighterTests.java b/plugins/mapper-annotated-text/src/test/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighterTests.java index dd2ee23355c1e..fe75566c315a1 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighterTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighterTests.java @@ -136,7 +136,6 @@ private void assertHighlightOneDoc( noMatchSize, expectedPassages.length, name -> "text".equals(name), - Integer.MAX_VALUE, Integer.MAX_VALUE ); highlighter.setFieldMatcher((name) -> 
"text".equals(name)); diff --git a/plugins/store-smb/src/internalClusterTest/java/org/opensearch/index/store/SmbNIOFsTests.java b/plugins/store-smb/src/internalClusterTest/java/org/opensearch/index/store/SmbNIOFsTests.java index 6610d8f704ea3..eca9ca356a764 100644 --- a/plugins/store-smb/src/internalClusterTest/java/org/opensearch/index/store/SmbNIOFsTests.java +++ b/plugins/store-smb/src/internalClusterTest/java/org/opensearch/index/store/SmbNIOFsTests.java @@ -6,13 +6,34 @@ * compatible open source license. */ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + package org.opensearch.index.store; import org.opensearch.common.settings.Settings; -/** - * Index Settings Tests for NIO FileSystem as index store type. 
- */ public class SmbNIOFsTests extends AbstractAzureFsTestCase { @Override public Settings indexSettings() { diff --git a/server/build.gradle b/server/build.gradle index 3a11428ca7919..dcf4d43c60192 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -97,7 +97,7 @@ dependencies { // lucene api "org.apache.lucene:lucene-core:${versions.lucene}" - api "org.apache.lucene:lucene-analyzers-common:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-common:${versions.lucene}" api "org.apache.lucene:lucene-backward-codecs:${versions.lucene}" api "org.apache.lucene:lucene-grouping:${versions.lucene}" api "org.apache.lucene:lucene-highlighter:${versions.lucene}" diff --git a/server/licenses/lucene-analysis-common-9.0.0.jar.sha1 b/server/licenses/lucene-analysis-common-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..2ed9dbcbe22f6 --- /dev/null +++ b/server/licenses/lucene-analysis-common-9.0.0.jar.sha1 @@ -0,0 +1 @@ +f78890829c3d6f15de48fdbc2c77ef4c0e3f005c \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-8.10.1.jar.sha1 b/server/licenses/lucene-analyzers-common-8.10.1.jar.sha1 deleted file mode 100644 index 685f94bcc6601..0000000000000 --- a/server/licenses/lucene-analyzers-common-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -23bb36a98d01100953674c56c20861b29b5a5175 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.10.1.jar.sha1 b/server/licenses/lucene-backward-codecs-8.10.1.jar.sha1 deleted file mode 100644 index 3191833511058..0000000000000 --- a/server/licenses/lucene-backward-codecs-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7399c32bc4ba7e37e14a9660ffd7962acf68a802 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 b/server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..acf5a2b543199 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 @@ -0,0 +1 @@ 
+9fb48d0244799e18299449ee62459caab0728490 \ No newline at end of file diff --git a/server/licenses/lucene-core-8.10.1.jar.sha1 b/server/licenses/lucene-core-8.10.1.jar.sha1 deleted file mode 100644 index 77f85d74d6e6c..0000000000000 --- a/server/licenses/lucene-core-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -deb78f6b21d29f964ab267ad59fafb58ef740101 \ No newline at end of file diff --git a/server/licenses/lucene-core-9.0.0.jar.sha1 b/server/licenses/lucene-core-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..c874382fc8355 --- /dev/null +++ b/server/licenses/lucene-core-9.0.0.jar.sha1 @@ -0,0 +1 @@ +be679fd274f264e4e8b02bc032d2788cd4076ab4 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.10.1.jar.sha1 b/server/licenses/lucene-grouping-8.10.1.jar.sha1 deleted file mode 100644 index 82dd3ba35b0a2..0000000000000 --- a/server/licenses/lucene-grouping-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7b91bb886d30c67a8f980d3bdfd6b7826a62d5e7 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.0.0.jar.sha1 b/server/licenses/lucene-grouping-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..18a81b5fa97ff --- /dev/null +++ b/server/licenses/lucene-grouping-9.0.0.jar.sha1 @@ -0,0 +1 @@ +27ebe235d427b4e392fabab9b6bfa09524ca7f8b \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.10.1.jar.sha1 b/server/licenses/lucene-highlighter-8.10.1.jar.sha1 deleted file mode 100644 index 901a99e05fa27..0000000000000 --- a/server/licenses/lucene-highlighter-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec4a2103cb300aab7e6142f1c7778dd505ecb8e2 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.0.0.jar.sha1 b/server/licenses/lucene-highlighter-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..5503495c2f86c --- /dev/null +++ b/server/licenses/lucene-highlighter-9.0.0.jar.sha1 @@ -0,0 +1 @@ +a3cb395c2e8c672e6eec951b2b02371a4a883f73 \ No newline at end of file diff --git 
a/server/licenses/lucene-join-8.10.1.jar.sha1 b/server/licenses/lucene-join-8.10.1.jar.sha1 deleted file mode 100644 index b7165475dac4f..0000000000000 --- a/server/licenses/lucene-join-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -aa368e9d11660dcfcfaab1a39dd871f05fa2b031 \ No newline at end of file diff --git a/server/licenses/lucene-join-9.0.0.jar.sha1 b/server/licenses/lucene-join-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..dcbaa17875435 --- /dev/null +++ b/server/licenses/lucene-join-9.0.0.jar.sha1 @@ -0,0 +1 @@ +94a855b5d09a6601289aeaeba0f11d5539552590 \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.10.1.jar.sha1 b/server/licenses/lucene-memory-8.10.1.jar.sha1 deleted file mode 100644 index ace60de0396b2..0000000000000 --- a/server/licenses/lucene-memory-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9de18bf605879647e964fd57ddf3fa6f85ca743e \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.0.0.jar.sha1 b/server/licenses/lucene-memory-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..157597ce9878f --- /dev/null +++ b/server/licenses/lucene-memory-9.0.0.jar.sha1 @@ -0,0 +1 @@ +2371c95031422bc1f501d43ffcc7311baed4b35b \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.10.1.jar.sha1 b/server/licenses/lucene-misc-8.10.1.jar.sha1 deleted file mode 100644 index ef9f37d080361..0000000000000 --- a/server/licenses/lucene-misc-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e9cca86ebbe010d375388c5a17216e2d2b2e76bb \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.0.0.jar.sha1 b/server/licenses/lucene-misc-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..ef031d34305a2 --- /dev/null +++ b/server/licenses/lucene-misc-9.0.0.jar.sha1 @@ -0,0 +1 @@ +25c6170f4fa2f707908dfb92fbafc76727f901e0 \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.10.1.jar.sha1 b/server/licenses/lucene-queries-8.10.1.jar.sha1 deleted file mode 100644 index 
ee8ec29fd21f9..0000000000000 --- a/server/licenses/lucene-queries-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -21b70a0996e3408291514d99e3b03800d0bcd657 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.0.0.jar.sha1 b/server/licenses/lucene-queries-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..4b43c9e6b709a --- /dev/null +++ b/server/licenses/lucene-queries-9.0.0.jar.sha1 @@ -0,0 +1 @@ +87b4c7833d30895baf7091f9cb0db878e970b604 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.10.1.jar.sha1 b/server/licenses/lucene-queryparser-8.10.1.jar.sha1 deleted file mode 100644 index 3175b926c47ad..0000000000000 --- a/server/licenses/lucene-queryparser-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -087f52ee3f72f387b802c49a96e4a14b3b05dd21 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.0.0.jar.sha1 b/server/licenses/lucene-queryparser-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..62a4650a168c7 --- /dev/null +++ b/server/licenses/lucene-queryparser-9.0.0.jar.sha1 @@ -0,0 +1 @@ +bf13395ad2033bca3182fcbc83204e8ae1951945 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.10.1.jar.sha1 b/server/licenses/lucene-sandbox-8.10.1.jar.sha1 deleted file mode 100644 index 5941c170b1e80..0000000000000 --- a/server/licenses/lucene-sandbox-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -82b15ef61297e6d7b0c1f6c37c502d6b77a82f1e \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.0.0.jar.sha1 b/server/licenses/lucene-sandbox-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..4396efda1a83b --- /dev/null +++ b/server/licenses/lucene-sandbox-9.0.0.jar.sha1 @@ -0,0 +1 @@ +3c153a1dc1da3f98083cc932c9476df4b77b0ca5 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.10.1.jar.sha1 b/server/licenses/lucene-spatial-extras-8.10.1.jar.sha1 deleted file mode 100644 index 7eb235d9a1914..0000000000000 --- 
a/server/licenses/lucene-spatial-extras-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7a3b6eac3e66bb1c6fb05c0cd980e5592adaf96b \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 b/server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..a742934def499 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 @@ -0,0 +1 @@ +91535ef6512c45c7e2b113b04cab7738ee774893 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.10.1.jar.sha1 b/server/licenses/lucene-spatial3d-8.10.1.jar.sha1 deleted file mode 100644 index 177ceb41b4205..0000000000000 --- a/server/licenses/lucene-spatial3d-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -823a5e9d2fd3b5b668d305e0781d0e074e9f2ebb \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.0.0.jar.sha1 b/server/licenses/lucene-spatial3d-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..0722795c260ad --- /dev/null +++ b/server/licenses/lucene-spatial3d-9.0.0.jar.sha1 @@ -0,0 +1 @@ +6b4ee47f218ed3d123c1b07671677a2e4f3c133b \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.10.1.jar.sha1 b/server/licenses/lucene-suggest-8.10.1.jar.sha1 deleted file mode 100644 index dae6bab002ef4..0000000000000 --- a/server/licenses/lucene-suggest-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -92d7e5a178d0df58e0b4d400755ac46bae3eea11 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.0.0.jar.sha1 b/server/licenses/lucene-suggest-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..7eb41e758379e --- /dev/null +++ b/server/licenses/lucene-suggest-9.0.0.jar.sha1 @@ -0,0 +1 @@ +a7d0e7279737114c039f5214082da948732096a6 \ No newline at end of file diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java index 
7fd2466647272..3f174dd0fdd6a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -56,6 +56,7 @@ public class IndexPrimaryRelocationIT extends OpenSearchIntegTestCase { private static final int RELOCATION_COUNT = 15; + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testPrimaryRelocationWhileIndexing() throws Exception { internalCluster().ensureAtLeastNumDataNodes(randomIntBetween(2, 3)); client().admin() diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java index c5b0d99e6d275..d17761f62eb53 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java @@ -32,9 +32,7 @@ package org.opensearch.recovery; -import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.carrotsearch.hppc.procedures.IntProcedure; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.util.English; import org.opensearch.action.ActionFuture; @@ -61,6 +59,7 @@ import org.opensearch.env.NodeEnvironment; import org.opensearch.index.IndexService; import org.opensearch.index.IndexSettings; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.ReplicationTracker; import org.opensearch.index.seqno.RetentionLease; import org.opensearch.index.shard.IndexEventListener; @@ -192,6 +191,7 @@ public void testSimpleRelocationNoIndexing() { assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L)); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void 
testRelocationWhileIndexingRandom() throws Exception { int numberOfRelocations = scaledRandomIntBetween(1, rarely() ? 10 : 4); int numberOfReplicas = randomBoolean() ? 0 : 1; @@ -228,7 +228,7 @@ public void testRelocationWhileIndexingRandom() throws Exception { } int numDocs = scaledRandomIntBetween(200, 2500); - try (BackgroundIndexer indexer = new BackgroundIndexer("test", "type1", client(), numDocs)) { + try (BackgroundIndexer indexer = new BackgroundIndexer("test", MapperService.SINGLE_MAPPING_NAME, client(), numDocs)) { logger.info("--> waiting for {} docs to be indexed ...", numDocs); waitForDocs(numDocs, indexer); logger.info("--> {} docs indexed", numDocs); @@ -285,20 +285,20 @@ public void testRelocationWhileIndexingRandom() throws Exception { for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { hitIds[hit] = hit + 1; } - IntHashSet set = IntHashSet.from(hitIds); + Set set = Arrays.stream(hitIds).boxed().collect(Collectors.toSet()); for (SearchHit hit : hits.getHits()) { int id = Integer.parseInt(hit.getId()); - if (!set.remove(id)) { + if (set.remove(id) == false) { logger.error("Extra id [{}]", id); } } - set.forEach((IntProcedure) value -> { logger.error("Missing id [{}]", value); }); + set.forEach(value -> logger.error("Missing id [{}]", value)); } assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); logger.info("--> DONE search test round {}", i + 1); } - if (!ranOnce) { + if (ranOnce == false) { fail(); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index f0fe5e4479b76..d1b3895ff40e1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -3288,6 +3288,36 
@@ public void testKeywordFieldHighlighting() throws IOException { ); } + public void testCopyToFields() throws Exception { + XContentBuilder b = jsonBuilder().startObject().startObject("properties"); + b.startObject("foo"); + { + b.field("type", "text"); + b.field("copy_to", "foo_copy"); + } + b.endObject(); + // If field is not stored, it is looked up in source (but source has only 'foo' + b.startObject("foo_copy").field("type", "text").field("store", true).endObject(); + b.endObject().endObject(); + prepareCreate("test").addMapping("type", b).get(); + + client().prepareIndex("test") + .setId("1") + .setSource(jsonBuilder().startObject().field("foo", "how now brown cow").endObject()) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + + SearchResponse response = client().prepareSearch() + .setQuery(matchQuery("foo_copy", "brown")) + .highlighter(new HighlightBuilder().field(new Field("foo_copy"))) + .get(); + + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo_copy"); + assertThat(field.getFragments().length, equalTo(1)); + assertThat(field.getFragments()[0].string(), equalTo("how now brown cow")); + } + public void testACopyFieldWithNestedQuery() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java index 5b2d87a6508fe..494aa4c0e6b88 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java @@ -331,8 +331,6 @@ public void testLimitOnExpandedFields() throws Exception { doAssertOneHitForQueryString("field_A0:foo"); // expanding to the limit should work doAssertOneHitForQueryString("field_A\\*:foo"); - // expanding two blocks to the limit still works - 
doAssertOneHitForQueryString("field_A\\*:foo field_B\\*:bar"); // adding a non-existing field on top shouldn't overshoot the limit doAssertOneHitForQueryString("field_A\\*:foo unmapped:something"); diff --git a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java index 30ab282bf3d44..a97c4a0d13f12 100644 --- a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java @@ -238,11 +238,10 @@ public void testExplainValidateQueryTwoNodes() throws IOException { assertThat(response.getQueryExplanation().size(), equalTo(1)); assertThat( response.getQueryExplanation().get(0).getExplanation(), - equalTo( - "(MatchNoDocsQuery(\"failed [bar] query, caused by number_format_exception:[For input string: \"foo\"]\") " - + "| foo:foo | baz:foo)" - ) + containsString("MatchNoDocsQuery(\"failed [bar] query, caused by number_format_exception:[For input string: \"foo\"]\")") ); + assertThat(response.getQueryExplanation().get(0).getExplanation(), containsString("foo:foo")); + assertThat(response.getQueryExplanation().get(0).getExplanation(), containsString("baz:foo")); assertThat(response.getQueryExplanation().get(0).getError(), nullValue()); } } diff --git a/server/src/main/java/org/apache/lucene/misc/search/similarity/LegacyBM25Similarity.java b/server/src/main/java/org/apache/lucene/misc/search/similarity/LegacyBM25Similarity.java new file mode 100644 index 0000000000000..3b812e1c70368 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/misc/search/similarity/LegacyBM25Similarity.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.misc.search.similarity; + +import org.apache.lucene.index.FieldInvertState; +import org.apache.lucene.search.CollectionStatistics; +import org.apache.lucene.search.TermStatistics; +import org.apache.lucene.search.similarities.BM25Similarity; +import org.apache.lucene.search.similarities.Similarity; + +/** + * Similarity that behaves like {@link BM25Similarity} while also applying the k1+1 factor to the + * numerator of the scoring formula + * + * @see BM25Similarity + * @deprecated {@link BM25Similarity} should be used instead + */ +@Deprecated +public final class LegacyBM25Similarity extends Similarity { + + private final BM25Similarity bm25Similarity; + + /** + * BM25 with these default values: + * + *
    + *
  • {@code k1 = 1.2} + *
  • {@code b = 0.75} + *
  • {@code discountOverlaps = true} + *
+ */ + public LegacyBM25Similarity() { + this.bm25Similarity = new BM25Similarity(); + } + + /** + * BM25 with the supplied parameter values. + * + * @param k1 Controls non-linear term frequency normalization (saturation). + * @param b Controls to what degree document length normalizes tf values. + * @throws IllegalArgumentException if {@code k1} is infinite or negative, or if {@code b} is not + * within the range {@code [0..1]} + */ + public LegacyBM25Similarity(float k1, float b) { + this.bm25Similarity = new BM25Similarity(k1, b); + } + + /** + * BM25 with the supplied parameter values. + * + * @param k1 Controls non-linear term frequency normalization (saturation). + * @param b Controls to what degree document length normalizes tf values. + * @param discountOverlaps True if overlap tokens (tokens with a position of increment of zero) + * are discounted from the document's length. + * @throws IllegalArgumentException if {@code k1} is infinite or negative, or if {@code b} is not + * within the range {@code [0..1]} + */ + public LegacyBM25Similarity(float k1, float b, boolean discountOverlaps) { + this.bm25Similarity = new BM25Similarity(k1, b, discountOverlaps); + } + + @Override + public long computeNorm(FieldInvertState state) { + return bm25Similarity.computeNorm(state); + } + + @Override + public SimScorer scorer(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) { + return bm25Similarity.scorer(boost * (1 + bm25Similarity.getK1()), collectionStats, termStats); + } + + /** + * Returns the k1 parameter + * + * @see #LegacyBM25Similarity(float, float) + */ + public final float getK1() { + return bm25Similarity.getK1(); + } + + /** + * Returns the b parameter + * + * @see #LegacyBM25Similarity(float, float) + */ + public final float getB() { + return bm25Similarity.getB(); + } + + /** + * Returns true if overlap tokens are discounted from the document's length. 
+ * + * @see #LegacyBM25Similarity(float, float, boolean) + */ + public boolean getDiscountOverlaps() { + return bm25Similarity.getDiscountOverlaps(); + } + + @Override + public String toString() { + return bm25Similarity.toString(); + } +} diff --git a/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java b/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java index 82778d31f6c2c..a2c59de7832d4 100644 --- a/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java +++ b/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java @@ -39,6 +39,7 @@ import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; @@ -138,6 +139,13 @@ public boolean isCacheable(LeafReaderContext ctx) { }; } + @Override + public void visit(QueryVisitor visitor) { + if (visitor.acceptField(fieldName)) { + visitor.visitLeaf(this); + } + } + @Override public String toString(String field) { return "BinaryDocValuesRangeQuery(fieldName=" + field + ",from=" + originalFrom + ",to=" + originalTo + ")"; diff --git a/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java b/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java index 12c51d951c6b2..ac279d6882634 100644 --- a/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java +++ b/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java @@ -34,16 +34,16 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermStates; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanWeight; +import org.apache.lucene.queries.spans.Spans; import 
org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanWeight; -import org.apache.lucene.search.spans.Spans; import java.io.IOException; import java.util.Collections; import java.util.Map; -import java.util.Set; /** * A {@link SpanQuery} that matches no documents. @@ -57,6 +57,11 @@ public SpanMatchNoDocsQuery(String field, String reason) { this.reason = reason; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public String getField() { return field; @@ -88,9 +93,6 @@ public Spans getSpans(LeafReaderContext ctx, Postings requiredPostings) { return null; } - @Override - public void extractTerms(Set terms) {} - @Override public boolean isCacheable(LeafReaderContext ctx) { return true; diff --git a/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java index f5fe33f1f95e1..fb22eb583d9e1 100644 --- a/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java +++ b/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java @@ -35,14 +35,15 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import 
org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.search.uhighlight.UnifiedHighlighter.HighlightFlag; import org.apache.lucene.util.BytesRef; import org.opensearch.common.CheckedSupplier; import org.opensearch.common.Nullable; @@ -77,7 +78,6 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter { private final Locale breakIteratorLocale; private final int noMatchSize; private final FieldHighlighter fieldHighlighter; - private final int keywordIgnoreAbove; private final int maxAnalyzedOffset; /** @@ -97,7 +97,6 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter { * @param noMatchSize The size of the text that should be returned when no highlighting can be performed. * @param maxPassages the maximum number of passes to highlight * @param fieldMatcher decides which terms should be highlighted - * @param keywordIgnoreAbove if the field's value is longer than this we'll skip it * @param maxAnalyzedOffset if the field is more than this long we'll refuse to use the ANALYZED * offset source for it because it'd be super slow */ @@ -114,7 +113,6 @@ public CustomUnifiedHighlighter( int noMatchSize, int maxPassages, Predicate fieldMatcher, - int keywordIgnoreAbove, int maxAnalyzedOffset ) throws IOException { super(searcher, analyzer); @@ -126,7 +124,6 @@ public CustomUnifiedHighlighter( this.field = field; this.noMatchSize = noMatchSize; this.setFieldMatcher(fieldMatcher); - this.keywordIgnoreAbove = keywordIgnoreAbove; this.maxAnalyzedOffset = maxAnalyzedOffset; fieldHighlighter = getFieldHighlighter(field, query, extractTerms(query), maxPassages); } @@ -144,9 +141,6 @@ public Snippet[] highlightField(LeafReader reader, int docId, CheckedSupplier keywordIgnoreAbove) { - return null; // skip highlighting keyword terms that were ignored during indexing - } if 
((offsetSource == OffsetSource.ANALYSIS) && (fieldValueLength > maxAnalyzedOffset)) { throw new IllegalArgumentException( "The length of [" @@ -266,4 +260,12 @@ protected OffsetSource getOffsetSource(String field) { return offsetSource; } + /** Customize the highlighting flags to use by field. */ + @Override + protected Set getFlags(String field) { + final Set flags = super.getFlags(field); + // Change the defaults introduced by https://issues.apache.org/jira/browse/LUCENE-9431 + flags.remove(HighlightFlag.WEIGHT_MATCHES); + return flags; + } } diff --git a/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java b/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java index 8e71aa5a0dce6..ac688f15cda01 100644 --- a/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java +++ b/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java @@ -35,6 +35,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MultiPhraseQuery; @@ -42,7 +43,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.index.search.OpenSearchToParentBlockJoinQuery; diff --git a/server/src/main/java/org/apache/lucene/util/CombinedBitSet.java b/server/src/main/java/org/apache/lucene/util/CombinedBitSet.java index 7b7841cc4265c..1b4f31892c7f8 100644 --- a/server/src/main/java/org/apache/lucene/util/CombinedBitSet.java +++ 
b/server/src/main/java/org/apache/lucene/util/CombinedBitSet.java @@ -127,4 +127,9 @@ public void clear(int i) { public void clear(int startIndex, int endIndex) { throw new UnsupportedOperationException("not implemented"); } + + @Override + public boolean getAndSet(int i) { + throw new UnsupportedOperationException("not implemented"); + } } diff --git a/server/src/main/java/org/apache/lucene/util/SPIClassIterator.java b/server/src/main/java/org/apache/lucene/util/SPIClassIterator.java new file mode 100644 index 0000000000000..1480c9aeeb2d8 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/util/SPIClassIterator.java @@ -0,0 +1,186 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.apache.lucene.util; + +import java.io.IOException; +import java.io.InputStream; +import java.io.BufferedReader; +import java.io.InputStreamReader; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Enumeration; +import java.util.Iterator; +import java.util.Locale; +import java.util.NoSuchElementException; +import java.util.Objects; +import java.util.ServiceConfigurationError; + +/** + * Helper class for loading SPI classes from classpath (META-INF files). + * This is a light impl of {@link java.util.ServiceLoader} but is guaranteed to + * be bug-free regarding classpath order and does not instantiate or initialize + * the classes found. + */ +@SuppressForbidden(reason = "Taken from Lucene") +public final class SPIClassIterator implements Iterator> { + private static final String META_INF_SERVICES = "META-INF/services/"; + + private final Class clazz; + private final ClassLoader loader; + private final Enumeration profilesEnum; + private Iterator linesIterator; + + /** Creates a new SPI iterator to lookup services of type {@code clazz} using + * the same {@link ClassLoader} as the argument. */ + public static SPIClassIterator get(Class clazz) { + return new SPIClassIterator<>(clazz, Objects.requireNonNull(clazz.getClassLoader(), () -> clazz + " has no classloader.")); + } + + /** Creates a new SPI iterator to lookup services of type {@code clazz} using the given classloader. */ + public static SPIClassIterator get(Class clazz, ClassLoader loader) { + return new SPIClassIterator<>(clazz, loader); + } + + /** + * Utility method to check if some class loader is a (grand-)parent of or the same as another one. + * This means the child will be able to load all classes from the parent, too. + *

+ * If caller's codesource doesn't have enough permissions to do the check, {@code false} is returned + * (this is fine, because if we get a {@code SecurityException} it is for sure no parent). + */ + public static boolean isParentClassLoader(final ClassLoader parent, final ClassLoader child) { + try { + ClassLoader cl = child; + while (cl != null) { + if (cl == parent) { + return true; + } + cl = cl.getParent(); + } + return false; + } catch (SecurityException se) { + return false; + } + } + + private SPIClassIterator(Class clazz, ClassLoader loader) { + this.clazz = Objects.requireNonNull(clazz, "clazz"); + this.loader = Objects.requireNonNull(loader, "loader"); + try { + final String fullName = META_INF_SERVICES + clazz.getName(); + this.profilesEnum = loader.getResources(fullName); + } catch (IOException ioe) { + throw new ServiceConfigurationError("Error loading SPI profiles for type " + clazz.getName() + " from classpath", ioe); + } + this.linesIterator = Collections.emptySet().iterator(); + } + + private boolean loadNextProfile() { + ArrayList lines = null; + while (profilesEnum.hasMoreElements()) { + if (lines != null) { + lines.clear(); + } else { + lines = new ArrayList<>(); + } + final URL url = profilesEnum.nextElement(); + try { + final InputStream in = url.openStream(); + boolean success = false; + try { + final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); + String line; + while ((line = reader.readLine()) != null) { + final int pos = line.indexOf('#'); + if (pos >= 0) { + line = line.substring(0, pos); + } + line = line.trim(); + if (line.length() > 0) { + lines.add(line); + } + } + success = true; + } finally { + if (success) { + IOUtils.close(in); + } else { + IOUtils.closeWhileHandlingException(in); + } + } + } catch (IOException ioe) { + throw new ServiceConfigurationError("Error loading SPI class list from URL: " + url, ioe); + } + if (lines.isEmpty() == false) { + this.linesIterator = 
lines.iterator(); + return true; + } + } + return false; + } + + @Override + public boolean hasNext() { + return linesIterator.hasNext() || loadNextProfile(); + } + + @Override + public Class next() { + // hasNext() implicitely loads the next profile, so it is essential to call this here! + if (hasNext() == false) { + throw new NoSuchElementException(); + } + assert linesIterator.hasNext(); + final String c = linesIterator.next(); + try { + // don't initialize the class (pass false as 2nd parameter): + return Class.forName(c, false, loader).asSubclass(clazz); + } catch (ClassNotFoundException cnfe) { + throw new ServiceConfigurationError( + String.format( + Locale.ROOT, + "An SPI class of type %s with classname %s does not exist, " + "please fix the file '%s%1$s' in your classpath.", + clazz.getName(), + c, + META_INF_SERVICES + ) + ); + } + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + +} diff --git a/server/src/main/java/org/apache/lucene/util/packed/XPacked64.java b/server/src/main/java/org/apache/lucene/util/packed/XPacked64.java new file mode 100644 index 0000000000000..d811b245606ba --- /dev/null +++ b/server/src/main/java/org/apache/lucene/util/packed/XPacked64.java @@ -0,0 +1,317 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.apache.lucene.util.packed; + +import java.io.IOException; +import java.util.Arrays; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.util.RamUsageEstimator; + +/** + * Forked from Lucene 8.x; removed in Lucene 9.0 + * + * @todo further investigate a better alternative + * + * Space optimized random access capable array of values with a fixed number of bits/value. Values + * are packed contiguously. + * + *

The implementation strives to perform as fast as possible under the constraint of contiguous + * bits, by avoiding expensive operations. This comes at the cost of code clarity. + * + *

Technical details: This implementation is a refinement of a non-branching version. The + * non-branching get and set methods meant that 2 or 4 atomics in the underlying array were always + * accessed, even for the cases where only 1 or 2 were needed. Even with caching, this had a + * detrimental effect on performance. Related to this issue, the old implementation used lookup + * tables for shifts and masks, which also proved to be a bit slower than calculating the shifts and + * masks on the fly. See https://issues.apache.org/jira/browse/LUCENE-4062 for details. + */ +class XPacked64 extends XPackedInts.MutableImpl { + static final int BLOCK_SIZE = 64; // 32 = int, 64 = long + static final int BLOCK_BITS = 6; // The #bits representing BLOCK_SIZE + static final int MOD_MASK = BLOCK_SIZE - 1; // x % BLOCK_SIZE + + /** Values are stores contiguously in the blocks array. */ + private final long[] blocks; + /** A right-aligned mask of width BitsPerValue used by {@link #get(int)}. */ + private final long maskRight; + /** Optimization: Saves one lookup in {@link #get(int)}. */ + private final int bpvMinusBlockSize; + + /** + * Creates an array with the internal structures adjusted for the given limits and initialized to + * 0. + * + * @param valueCount the number of elements. + * @param bitsPerValue the number of bits available for any given value. + */ + public XPacked64(int valueCount, int bitsPerValue) { + super(valueCount, bitsPerValue); + final PackedInts.Format format = PackedInts.Format.PACKED; + final int longCount = format.longCount(PackedInts.VERSION_CURRENT, valueCount, bitsPerValue); + this.blocks = new long[longCount]; + maskRight = ~0L << (BLOCK_SIZE - bitsPerValue) >>> (BLOCK_SIZE - bitsPerValue); + bpvMinusBlockSize = bitsPerValue - BLOCK_SIZE; + } + + /** + * Creates an array with content retrieved from the given DataInput. + * + * @param in a DataInput, positioned at the start of Packed64-content. + * @param valueCount the number of elements. 
+ * @param bitsPerValue the number of bits available for any given value. + * @throws java.io.IOException if the values for the backing array could not be retrieved. + */ + public XPacked64(int packedIntsVersion, DataInput in, int valueCount, int bitsPerValue) throws IOException { + super(valueCount, bitsPerValue); + final PackedInts.Format format = PackedInts.Format.PACKED; + final long byteCount = format.byteCount(packedIntsVersion, valueCount, bitsPerValue); // to know how much to read + final int longCount = format.longCount(PackedInts.VERSION_CURRENT, valueCount, bitsPerValue); // to size the array + blocks = new long[longCount]; + // read as many longs as we can + for (int i = 0; i < byteCount / 8; ++i) { + blocks[i] = in.readLong(); + } + final int remaining = (int) (byteCount % 8); + if (remaining != 0) { + // read the last bytes + long lastLong = 0; + for (int i = 0; i < remaining; ++i) { + lastLong |= (in.readByte() & 0xFFL) << (56 - i * 8); + } + blocks[blocks.length - 1] = lastLong; + } + maskRight = ~0L << (BLOCK_SIZE - bitsPerValue) >>> (BLOCK_SIZE - bitsPerValue); + bpvMinusBlockSize = bitsPerValue - BLOCK_SIZE; + } + + /** + * @param index the position of the value. + * @return the value at the given index. 
+ */ + @Override + public long get(final int index) { + // The abstract index in a bit stream + final long majorBitPos = (long) index * bitsPerValue; + // The index in the backing long-array + final int elementPos = (int) (majorBitPos >>> BLOCK_BITS); + // The number of value-bits in the second long + final long endBits = (majorBitPos & MOD_MASK) + bpvMinusBlockSize; + + if (endBits <= 0) { // Single block + return (blocks[elementPos] >>> -endBits) & maskRight; + } + // Two blocks + return ((blocks[elementPos] << endBits) | (blocks[elementPos + 1] >>> (BLOCK_SIZE - endBits))) & maskRight; + } + + @Override + public int get(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < valueCount; + len = Math.min(len, valueCount - index); + assert off + len <= arr.length; + + final int originalIndex = index; + final PackedInts.Decoder decoder = BulkOperation.of(PackedInts.Format.PACKED, bitsPerValue); + + // go to the next block where the value does not span across two blocks + final int offsetInBlocks = index % decoder.longValueCount(); + if (offsetInBlocks != 0) { + for (int i = offsetInBlocks; i < decoder.longValueCount() && len > 0; ++i) { + arr[off++] = get(index++); + --len; + } + if (len == 0) { + return index - originalIndex; + } + } + + // bulk get + assert index % decoder.longValueCount() == 0; + int blockIndex = (int) (((long) index * bitsPerValue) >>> BLOCK_BITS); + assert (((long) index * bitsPerValue) & MOD_MASK) == 0; + final int iterations = len / decoder.longValueCount(); + decoder.decode(blocks, blockIndex, arr, off, iterations); + final int gotValues = iterations * decoder.longValueCount(); + index += gotValues; + len -= gotValues; + assert len >= 0; + + if (index > originalIndex) { + // stay at the block boundary + return index - originalIndex; + } else { + // no progress so far => already at a block boundary but no full block to get + assert index == originalIndex; + return 
super.get(index, arr, off, len); + } + } + + @Override + public void set(final int index, final long value) { + // The abstract index in a contiguous bit stream + final long majorBitPos = (long) index * bitsPerValue; + // The index in the backing long-array + final int elementPos = (int) (majorBitPos >>> BLOCK_BITS); // / BLOCK_SIZE + // The number of value-bits in the second long + final long endBits = (majorBitPos & MOD_MASK) + bpvMinusBlockSize; + + if (endBits <= 0) { // Single block + blocks[elementPos] = blocks[elementPos] & ~(maskRight << -endBits) | (value << -endBits); + return; + } + // Two blocks + blocks[elementPos] = blocks[elementPos] & ~(maskRight >>> endBits) | (value >>> endBits); + blocks[elementPos + 1] = blocks[elementPos + 1] & (~0L >>> endBits) | (value << (BLOCK_SIZE - endBits)); + } + + @Override + public int set(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < valueCount; + len = Math.min(len, valueCount - index); + assert off + len <= arr.length; + + final int originalIndex = index; + final PackedInts.Encoder encoder = BulkOperation.of(PackedInts.Format.PACKED, bitsPerValue); + + // go to the next block where the value does not span across two blocks + final int offsetInBlocks = index % encoder.longValueCount(); + if (offsetInBlocks != 0) { + for (int i = offsetInBlocks; i < encoder.longValueCount() && len > 0; ++i) { + set(index++, arr[off++]); + --len; + } + if (len == 0) { + return index - originalIndex; + } + } + + // bulk set + assert index % encoder.longValueCount() == 0; + int blockIndex = (int) (((long) index * bitsPerValue) >>> BLOCK_BITS); + assert (((long) index * bitsPerValue) & MOD_MASK) == 0; + final int iterations = len / encoder.longValueCount(); + encoder.encode(arr, off, blocks, blockIndex, iterations); + final int setValues = iterations * encoder.longValueCount(); + index += setValues; + len -= setValues; + assert len >= 0; + + if (index > 
originalIndex) { + // stay at the block boundary + return index - originalIndex; + } else { + // no progress so far => already at a block boundary but no full block to get + assert index == originalIndex; + return super.set(index, arr, off, len); + } + } + + @Override + public String toString() { + return "Packed64(bitsPerValue=" + bitsPerValue + ",size=" + size() + ",blocks=" + blocks.length + ")"; + } + + @Override + public long ramBytesUsed() { + return RamUsageEstimator.alignObjectSize( + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 3 * Integer.BYTES // bpvMinusBlockSize,valueCount,bitsPerValue + + Long.BYTES // maskRight + + RamUsageEstimator.NUM_BYTES_OBJECT_REF + ) // blocks ref + + RamUsageEstimator.sizeOf(blocks); + } + + @Override + public void fill(int fromIndex, int toIndex, long val) { + assert PackedInts.unsignedBitsRequired(val) <= getBitsPerValue(); + assert fromIndex <= toIndex; + + // minimum number of values that use an exact number of full blocks + final int nAlignedValues = 64 / gcd(64, bitsPerValue); + final int span = toIndex - fromIndex; + if (span <= 3 * nAlignedValues) { + // there needs be at least 2 * nAlignedValues aligned values for the + // block approach to be worth trying + super.fill(fromIndex, toIndex, val); + return; + } + + // fill the first values naively until the next block start + final int fromIndexModNAlignedValues = fromIndex % nAlignedValues; + if (fromIndexModNAlignedValues != 0) { + for (int i = fromIndexModNAlignedValues; i < nAlignedValues; ++i) { + set(fromIndex++, val); + } + } + assert fromIndex % nAlignedValues == 0; + + // compute the long[] blocks for nAlignedValues consecutive values and + // use them to set as many values as possible without applying any mask + // or shift + final int nAlignedBlocks = (nAlignedValues * bitsPerValue) >> 6; + final long[] nAlignedValuesBlocks; + { + XPacked64 values = new XPacked64(nAlignedValues, bitsPerValue); + for (int i = 0; i < nAlignedValues; ++i) { + values.set(i, val); 
+ } + nAlignedValuesBlocks = values.blocks; + assert nAlignedBlocks <= nAlignedValuesBlocks.length; + } + final int startBlock = (int) (((long) fromIndex * bitsPerValue) >>> 6); + final int endBlock = (int) (((long) toIndex * bitsPerValue) >>> 6); + for (int block = startBlock; block < endBlock; ++block) { + final long blockValue = nAlignedValuesBlocks[block % nAlignedBlocks]; + blocks[block] = blockValue; + } + + // fill the gap + for (int i = (int) (((long) endBlock << 6) / bitsPerValue); i < toIndex; ++i) { + set(i, val); + } + } + + private static int gcd(int a, int b) { + if (a < b) { + return gcd(b, a); + } else if (b == 0) { + return a; + } else { + return gcd(b, a % b); + } + } + + @Override + public void clear() { + Arrays.fill(blocks, 0L); + } +} diff --git a/server/src/main/java/org/apache/lucene/util/packed/XPacked64SingleBlock.java b/server/src/main/java/org/apache/lucene/util/packed/XPacked64SingleBlock.java new file mode 100644 index 0000000000000..ef7644c32a843 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/util/packed/XPacked64SingleBlock.java @@ -0,0 +1,574 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ +package org.apache.lucene.util.packed; + +import java.io.IOException; +import java.util.Arrays; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.util.RamUsageEstimator; + +/** + * Forked from Lucene 8.x; removed in Lucene 9.0 + * + * @todo further investigate a better alternative + * + * This class is similar to {@link Packed64} except that it trades space for speed by ensuring that + * a single block needs to be read/written in order to read/write a value. + */ +abstract class XPacked64SingleBlock extends XPackedInts.MutableImpl { + + public static final int MAX_SUPPORTED_BITS_PER_VALUE = 32; + private static final int[] SUPPORTED_BITS_PER_VALUE = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 16, 21, 32 }; + + public static boolean isSupported(int bitsPerValue) { + return Arrays.binarySearch(SUPPORTED_BITS_PER_VALUE, bitsPerValue) >= 0; + } + + private static int requiredCapacity(int valueCount, int valuesPerBlock) { + return valueCount / valuesPerBlock + (valueCount % valuesPerBlock == 0 ? 
0 : 1); + } + + final long[] blocks; + + XPacked64SingleBlock(int valueCount, int bitsPerValue) { + super(valueCount, bitsPerValue); + assert isSupported(bitsPerValue); + final int valuesPerBlock = 64 / bitsPerValue; + blocks = new long[requiredCapacity(valueCount, valuesPerBlock)]; + } + + @Override + public void clear() { + Arrays.fill(blocks, 0L); + } + + @Override + public long ramBytesUsed() { + return RamUsageEstimator.alignObjectSize( + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * Integer.BYTES // valueCount,bitsPerValue + + RamUsageEstimator.NUM_BYTES_OBJECT_REF + ) // blocks ref + + RamUsageEstimator.sizeOf(blocks); + } + + @Override + public int get(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < valueCount; + len = Math.min(len, valueCount - index); + assert off + len <= arr.length; + + final int originalIndex = index; + + // go to the next block boundary + final int valuesPerBlock = 64 / bitsPerValue; + final int offsetInBlock = index % valuesPerBlock; + if (offsetInBlock != 0) { + for (int i = offsetInBlock; i < valuesPerBlock && len > 0; ++i) { + arr[off++] = get(index++); + --len; + } + if (len == 0) { + return index - originalIndex; + } + } + + // bulk get + assert index % valuesPerBlock == 0; + @SuppressWarnings("deprecation") + final PackedInts.Decoder decoder = BulkOperation.of(PackedInts.Format.PACKED_SINGLE_BLOCK, bitsPerValue); + assert decoder.longBlockCount() == 1; + assert decoder.longValueCount() == valuesPerBlock; + final int blockIndex = index / valuesPerBlock; + final int nblocks = (index + len) / valuesPerBlock - blockIndex; + decoder.decode(blocks, blockIndex, arr, off, nblocks); + final int diff = nblocks * valuesPerBlock; + index += diff; + len -= diff; + + if (index > originalIndex) { + // stay at the block boundary + return index - originalIndex; + } else { + // no progress so far => already at a block boundary but no full block to + // get + 
assert index == originalIndex; + return super.get(index, arr, off, len); + } + } + + @Override + public int set(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < valueCount; + len = Math.min(len, valueCount - index); + assert off + len <= arr.length; + + final int originalIndex = index; + + // go to the next block boundary + final int valuesPerBlock = 64 / bitsPerValue; + final int offsetInBlock = index % valuesPerBlock; + if (offsetInBlock != 0) { + for (int i = offsetInBlock; i < valuesPerBlock && len > 0; ++i) { + set(index++, arr[off++]); + --len; + } + if (len == 0) { + return index - originalIndex; + } + } + + // bulk set + assert index % valuesPerBlock == 0; + @SuppressWarnings("deprecation") + final BulkOperation op = BulkOperation.of(PackedInts.Format.PACKED_SINGLE_BLOCK, bitsPerValue); + assert op.longBlockCount() == 1; + assert op.longValueCount() == valuesPerBlock; + final int blockIndex = index / valuesPerBlock; + final int nblocks = (index + len) / valuesPerBlock - blockIndex; + op.encode(arr, off, blocks, blockIndex, nblocks); + final int diff = nblocks * valuesPerBlock; + index += diff; + len -= diff; + + if (index > originalIndex) { + // stay at the block boundary + return index - originalIndex; + } else { + // no progress so far => already at a block boundary but no full block to + // set + assert index == originalIndex; + return super.set(index, arr, off, len); + } + } + + @Override + public void fill(int fromIndex, int toIndex, long val) { + assert fromIndex >= 0; + assert fromIndex <= toIndex; + assert PackedInts.unsignedBitsRequired(val) <= bitsPerValue; + + final int valuesPerBlock = 64 / bitsPerValue; + if (toIndex - fromIndex <= valuesPerBlock << 1) { + // there needs to be at least one full block to set for the block + // approach to be worth trying + super.fill(fromIndex, toIndex, val); + return; + } + + // set values naively until the next block start + int 
fromOffsetInBlock = fromIndex % valuesPerBlock; + if (fromOffsetInBlock != 0) { + for (int i = fromOffsetInBlock; i < valuesPerBlock; ++i) { + set(fromIndex++, val); + } + assert fromIndex % valuesPerBlock == 0; + } + + // bulk set of the inner blocks + final int fromBlock = fromIndex / valuesPerBlock; + final int toBlock = toIndex / valuesPerBlock; + assert fromBlock * valuesPerBlock == fromIndex; + + long blockValue = 0L; + for (int i = 0; i < valuesPerBlock; ++i) { + blockValue = blockValue | (val << (i * bitsPerValue)); + } + Arrays.fill(blocks, fromBlock, toBlock, blockValue); + + // fill the gap + for (int i = valuesPerBlock * toBlock; i < toIndex; ++i) { + set(i, val); + } + } + + @SuppressWarnings("deprecation") + protected PackedInts.Format getFormat() { + return PackedInts.Format.PACKED_SINGLE_BLOCK; + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(bitsPerValue=" + bitsPerValue + ",size=" + size() + ",blocks=" + blocks.length + ")"; + } + + public static XPacked64SingleBlock create(DataInput in, int valueCount, int bitsPerValue) throws IOException { + XPacked64SingleBlock reader = create(valueCount, bitsPerValue); + for (int i = 0; i < reader.blocks.length; ++i) { + reader.blocks[i] = in.readLong(); + } + return reader; + } + + public static XPacked64SingleBlock create(int valueCount, int bitsPerValue) { + switch (bitsPerValue) { + case 1: + return new XPacked64SingleBlock1(valueCount); + case 2: + return new XPacked64SingleBlock2(valueCount); + case 3: + return new XPacked64SingleBlock3(valueCount); + case 4: + return new XPacked64SingleBlock4(valueCount); + case 5: + return new XPacked64SingleBlock5(valueCount); + case 6: + return new XPacked64SingleBlock6(valueCount); + case 7: + return new XPacked64SingleBlock7(valueCount); + case 8: + return new XPacked64SingleBlock8(valueCount); + case 9: + return new XPacked64SingleBlock9(valueCount); + case 10: + return new XPacked64SingleBlock10(valueCount); + case 12: + 
return new XPacked64SingleBlock12(valueCount); + case 16: + return new XPacked64SingleBlock16(valueCount); + case 21: + return new XPacked64SingleBlock21(valueCount); + case 32: + return new XPacked64SingleBlock32(valueCount); + default: + throw new IllegalArgumentException("Unsupported number of bits per value: " + 32); + } + } + + static class XPacked64SingleBlock1 extends XPacked64SingleBlock { + + XPacked64SingleBlock1(int valueCount) { + super(valueCount, 1); + } + + @Override + public long get(int index) { + final int o = index >>> 6; + final int b = index & 63; + final int shift = b << 0; + return (blocks[o] >>> shift) & 1L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 6; + final int b = index & 63; + final int shift = b << 0; + blocks[o] = (blocks[o] & ~(1L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock2 extends XPacked64SingleBlock { + + XPacked64SingleBlock2(int valueCount) { + super(valueCount, 2); + } + + @Override + public long get(int index) { + final int o = index >>> 5; + final int b = index & 31; + final int shift = b << 1; + return (blocks[o] >>> shift) & 3L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 5; + final int b = index & 31; + final int shift = b << 1; + blocks[o] = (blocks[o] & ~(3L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock3 extends XPacked64SingleBlock { + + XPacked64SingleBlock3(int valueCount) { + super(valueCount, 3); + } + + @Override + public long get(int index) { + final int o = index / 21; + final int b = index % 21; + final int shift = b * 3; + return (blocks[o] >>> shift) & 7L; + } + + @Override + public void set(int index, long value) { + final int o = index / 21; + final int b = index % 21; + final int shift = b * 3; + blocks[o] = (blocks[o] & ~(7L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock4 extends XPacked64SingleBlock { + + 
XPacked64SingleBlock4(int valueCount) { + super(valueCount, 4); + } + + @Override + public long get(int index) { + final int o = index >>> 4; + final int b = index & 15; + final int shift = b << 2; + return (blocks[o] >>> shift) & 15L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 4; + final int b = index & 15; + final int shift = b << 2; + blocks[o] = (blocks[o] & ~(15L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock5 extends XPacked64SingleBlock { + + XPacked64SingleBlock5(int valueCount) { + super(valueCount, 5); + } + + @Override + public long get(int index) { + final int o = index / 12; + final int b = index % 12; + final int shift = b * 5; + return (blocks[o] >>> shift) & 31L; + } + + @Override + public void set(int index, long value) { + final int o = index / 12; + final int b = index % 12; + final int shift = b * 5; + blocks[o] = (blocks[o] & ~(31L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock6 extends XPacked64SingleBlock { + + XPacked64SingleBlock6(int valueCount) { + super(valueCount, 6); + } + + @Override + public long get(int index) { + final int o = index / 10; + final int b = index % 10; + final int shift = b * 6; + return (blocks[o] >>> shift) & 63L; + } + + @Override + public void set(int index, long value) { + final int o = index / 10; + final int b = index % 10; + final int shift = b * 6; + blocks[o] = (blocks[o] & ~(63L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock7 extends XPacked64SingleBlock { + + XPacked64SingleBlock7(int valueCount) { + super(valueCount, 7); + } + + @Override + public long get(int index) { + final int o = index / 9; + final int b = index % 9; + final int shift = b * 7; + return (blocks[o] >>> shift) & 127L; + } + + @Override + public void set(int index, long value) { + final int o = index / 9; + final int b = index % 9; + final int shift = b * 7; + blocks[o] = (blocks[o] & ~(127L << shift)) | 
(value << shift); + } + } + + static class XPacked64SingleBlock8 extends XPacked64SingleBlock { + + XPacked64SingleBlock8(int valueCount) { + super(valueCount, 8); + } + + @Override + public long get(int index) { + final int o = index >>> 3; + final int b = index & 7; + final int shift = b << 3; + return (blocks[o] >>> shift) & 255L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 3; + final int b = index & 7; + final int shift = b << 3; + blocks[o] = (blocks[o] & ~(255L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock9 extends XPacked64SingleBlock { + + XPacked64SingleBlock9(int valueCount) { + super(valueCount, 9); + } + + @Override + public long get(int index) { + final int o = index / 7; + final int b = index % 7; + final int shift = b * 9; + return (blocks[o] >>> shift) & 511L; + } + + @Override + public void set(int index, long value) { + final int o = index / 7; + final int b = index % 7; + final int shift = b * 9; + blocks[o] = (blocks[o] & ~(511L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock10 extends XPacked64SingleBlock { + + XPacked64SingleBlock10(int valueCount) { + super(valueCount, 10); + } + + @Override + public long get(int index) { + final int o = index / 6; + final int b = index % 6; + final int shift = b * 10; + return (blocks[o] >>> shift) & 1023L; + } + + @Override + public void set(int index, long value) { + final int o = index / 6; + final int b = index % 6; + final int shift = b * 10; + blocks[o] = (blocks[o] & ~(1023L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock12 extends XPacked64SingleBlock { + + XPacked64SingleBlock12(int valueCount) { + super(valueCount, 12); + } + + @Override + public long get(int index) { + final int o = index / 5; + final int b = index % 5; + final int shift = b * 12; + return (blocks[o] >>> shift) & 4095L; + } + + @Override + public void set(int index, long value) { + final int o = index 
/ 5; + final int b = index % 5; + final int shift = b * 12; + blocks[o] = (blocks[o] & ~(4095L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock16 extends XPacked64SingleBlock { + + XPacked64SingleBlock16(int valueCount) { + super(valueCount, 16); + } + + @Override + public long get(int index) { + final int o = index >>> 2; + final int b = index & 3; + final int shift = b << 4; + return (blocks[o] >>> shift) & 65535L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 2; + final int b = index & 3; + final int shift = b << 4; + blocks[o] = (blocks[o] & ~(65535L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock21 extends XPacked64SingleBlock { + + XPacked64SingleBlock21(int valueCount) { + super(valueCount, 21); + } + + @Override + public long get(int index) { + final int o = index / 3; + final int b = index % 3; + final int shift = b * 21; + return (blocks[o] >>> shift) & 2097151L; + } + + @Override + public void set(int index, long value) { + final int o = index / 3; + final int b = index % 3; + final int shift = b * 21; + blocks[o] = (blocks[o] & ~(2097151L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock32 extends XPacked64SingleBlock { + + XPacked64SingleBlock32(int valueCount) { + super(valueCount, 32); + } + + @Override + public long get(int index) { + final int o = index >>> 1; + final int b = index & 1; + final int shift = b << 5; + return (blocks[o] >>> shift) & 4294967295L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 1; + final int b = index & 1; + final int shift = b << 5; + blocks[o] = (blocks[o] & ~(4294967295L << shift)) | (value << shift); + } + } +} diff --git a/server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java b/server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java new file mode 100644 index 0000000000000..9a277a7b5f2f4 --- /dev/null +++ 
b/server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java @@ -0,0 +1,740 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.util.packed; + +import java.io.EOFException; +import java.io.IOException; +import java.util.Arrays; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.RamUsageEstimator; +import org.apache.lucene.util.packed.PackedInts.Decoder; +import org.apache.lucene.util.packed.PackedInts.Encoder; +import org.apache.lucene.util.packed.PackedInts.Format; +import org.apache.lucene.util.packed.PackedInts.FormatAndBits; +import org.apache.lucene.util.packed.PackedInts.Reader; +import org.apache.lucene.util.packed.PackedInts.ReaderIterator; +import org.apache.lucene.util.packed.PackedInts.Writer; + +/** + * Forked from Lucene 8.x; removed in Lucene 8.9 + * + * Todo: further investigate a better alternative + * + * Simplistic compression for array of unsigned long values. Each value is {@code >= 0} and {@code + * <=} a specified maximum value. 
The values are stored as packed ints, with each value consuming a + * fixed number of bits. + */ +public class XPackedInts { + + /** At most 700% memory overhead, always select a direct implementation. */ + public static final float FASTEST = 7f; + + /** At most 50% memory overhead, always select a reasonably fast implementation. */ + public static final float FAST = 0.5f; + + /** At most 25% memory overhead. */ + public static final float DEFAULT = 0.25f; + + /** No memory overhead at all, but the returned implementation may be slow. */ + public static final float COMPACT = 0f; + + /** Default amount of memory to use for bulk operations. */ + public static final int DEFAULT_BUFFER_SIZE = 1024; // 1K + + public static final String CODEC_NAME = "PackedInts"; + public static final int VERSION_MONOTONIC_WITHOUT_ZIGZAG = 2; + public static final int VERSION_START = VERSION_MONOTONIC_WITHOUT_ZIGZAG; + public static final int VERSION_CURRENT = VERSION_MONOTONIC_WITHOUT_ZIGZAG; + + /** Check the validity of a version number. */ + public static void checkVersion(int version) { + if (version < VERSION_START) { + throw new IllegalArgumentException("Version is too old, should be at least " + VERSION_START + " (got " + version + ")"); + } else if (version > VERSION_CURRENT) { + throw new IllegalArgumentException("Version is too new, should be at most " + VERSION_CURRENT + " (got " + version + ")"); + } + } + + /** + * Try to find the {@link Format} and number of bits per value that would restore from disk the + * fastest reader whose overhead is less than acceptableOverheadRatio. + * + *

The acceptableOverheadRatio parameter makes sense for random-access {@link + * Reader}s. In case you only plan to perform sequential access on this stream later on, you + * should probably use {@link PackedInts#COMPACT}. + * + *

If you don't know how many values you are going to write, use valueCount = -1. + */ + public static FormatAndBits fastestFormatAndBits(int valueCount, int bitsPerValue, float acceptableOverheadRatio) { + if (valueCount == -1) { + valueCount = Integer.MAX_VALUE; + } + + acceptableOverheadRatio = Math.max(COMPACT, acceptableOverheadRatio); + acceptableOverheadRatio = Math.min(FASTEST, acceptableOverheadRatio); + float acceptableOverheadPerValue = acceptableOverheadRatio * bitsPerValue; // in bits + + int maxBitsPerValue = bitsPerValue + (int) acceptableOverheadPerValue; + + int actualBitsPerValue = -1; + + // rounded number of bits per value are usually the fastest + if (bitsPerValue <= 8 && maxBitsPerValue >= 8) { + actualBitsPerValue = 8; + } else if (bitsPerValue <= 16 && maxBitsPerValue >= 16) { + actualBitsPerValue = 16; + } else if (bitsPerValue <= 32 && maxBitsPerValue >= 32) { + actualBitsPerValue = 32; + } else if (bitsPerValue <= 64 && maxBitsPerValue >= 64) { + actualBitsPerValue = 64; + } else { + actualBitsPerValue = bitsPerValue; + } + + return new FormatAndBits(Format.PACKED, actualBitsPerValue); + } + + final static class XPackedWriter extends XWriter { + + boolean finished; + final PackedInts.Format format; + final BulkOperation encoder; + final byte[] nextBlocks; + final long[] nextValues; + final int iterations; + int off; + int written; + + XPackedWriter(PackedInts.Format format, DataOutput out, int valueCount, int bitsPerValue, int mem) { + super(out, valueCount, bitsPerValue); + this.format = format; + encoder = BulkOperation.of(format, bitsPerValue); + iterations = encoder.computeIterations(valueCount, mem); + nextBlocks = new byte[iterations * encoder.byteBlockCount()]; + nextValues = new long[iterations * encoder.byteValueCount()]; + off = 0; + written = 0; + finished = false; + } + + @Override + protected PackedInts.Format getFormat() { + return format; + } + + @Override + public void add(long v) throws IOException { + assert 
PackedInts.unsignedBitsRequired(v) <= bitsPerValue; + assert !finished; + if (valueCount != -1 && written >= valueCount) { + throw new EOFException("Writing past end of stream"); + } + nextValues[off++] = v; + if (off == nextValues.length) { + flush(); + } + ++written; + } + + @Override + public void finish() throws IOException { + assert !finished; + if (valueCount != -1) { + while (written < valueCount) { + add(0L); + } + } + flush(); + finished = true; + } + + private void flush() throws IOException { + encoder.encode(nextValues, 0, nextBlocks, 0, iterations); + final int blockCount = (int) format.byteCount(PackedInts.VERSION_CURRENT, off, bitsPerValue); + out.writeBytes(nextBlocks, blockCount); + Arrays.fill(nextValues, 0L); + off = 0; + } + + @Override + public int ord() { + return written - 1; + } + } + + /** + * A packed integer array that can be modified. + * + */ + public abstract static class Mutable extends Reader { + + /** + * @return the number of bits used to store any given value. Note: This does not imply that + * memory usage is {@code bitsPerValue * #values} as implementations are free to use + * non-space-optimal packing of bits. + */ + public abstract int getBitsPerValue(); + + /** + * Set the value at the given index in the array. + * + * @param index where the value should be positioned. + * @param value a value conforming to the constraints set by the array. + */ + public abstract void set(int index, long value); + + /** + * Bulk set: set at least one and at most len longs starting at off in + * arr into this mutable, starting at index. Returns the actual number + * of values that have been set. 
+ */ + public int set(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < size(); + len = Math.min(len, size() - index); + assert off + len <= arr.length; + + for (int i = index, o = off, end = index + len; i < end; ++i, ++o) { + set(i, arr[o]); + } + return len; + } + + /** + * Fill the mutable from fromIndex (inclusive) to toIndex (exclusive) + * with val. + */ + public void fill(int fromIndex, int toIndex, long val) { + assert val <= maxValue(getBitsPerValue()); + assert fromIndex <= toIndex; + for (int i = fromIndex; i < toIndex; ++i) { + set(i, val); + } + } + + /** Sets all values to 0. */ + public void clear() { + fill(0, size(), 0); + } + + /** + * Save this mutable into out. Instantiating a reader from the generated data will + * return a reader with the same number of bits per value. + */ + public void save(DataOutput out) throws IOException { + XWriter writer = getWriterNoHeader(out, getFormat(), size(), getBitsPerValue(), DEFAULT_BUFFER_SIZE); + writer.writeHeader(); + for (int i = 0; i < size(); ++i) { + writer.add(get(i)); + } + writer.finish(); + } + + /** The underlying format. */ + Format getFormat() { + return Format.PACKED; + } + } + + /** + * A simple base for Readers that keeps track of valueCount and bitsPerValue. 
+ * + */ + abstract static class ReaderImpl extends Reader { + protected final int valueCount; + + protected ReaderImpl(int valueCount) { + this.valueCount = valueCount; + } + + @Override + public abstract long get(int index); + + @Override + public final int size() { + return valueCount; + } + } + + abstract static class MutableImpl extends Mutable { + + protected final int valueCount; + protected final int bitsPerValue; + + protected MutableImpl(int valueCount, int bitsPerValue) { + this.valueCount = valueCount; + assert bitsPerValue > 0 && bitsPerValue <= 64 : "bitsPerValue=" + bitsPerValue; + this.bitsPerValue = bitsPerValue; + } + + @Override + public final int getBitsPerValue() { + return bitsPerValue; + } + + @Override + public final int size() { + return valueCount; + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(valueCount=" + valueCount + ",bitsPerValue=" + bitsPerValue + ")"; + } + } + + /** A {@link Reader} which has all its values equal to 0 (bitsPerValue = 0). */ + public static final class NullReader extends Reader { + + private final int valueCount; + + /** Sole constructor. */ + public NullReader(int valueCount) { + this.valueCount = valueCount; + } + + @Override + public long get(int index) { + return 0; + } + + @Override + public int get(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < valueCount; + len = Math.min(len, valueCount - index); + Arrays.fill(arr, off, off + len, 0); + return len; + } + + @Override + public int size() { + return valueCount; + } + + @Override + public long ramBytesUsed() { + return RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES); + } + } + + /** + * A write-once Writer. 
+ * + */ + public abstract static class XWriter extends Writer { + protected XWriter(DataOutput out, int valueCount, int bitsPerValue) { + super(out, valueCount, bitsPerValue); + } + + void writeHeader() throws IOException { + assert valueCount != -1; + CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT); + out.writeVInt(bitsPerValue); + out.writeVInt(valueCount); + out.writeVInt(getFormat().getId()); + } + } + + /** + * Get a {@link Decoder}. + * + * @param format the format used to store packed ints + * @param version the compatibility version + * @param bitsPerValue the number of bits per value + * @return a decoder + */ + public static Decoder getDecoder(Format format, int version, int bitsPerValue) { + checkVersion(version); + return BulkOperation.of(format, bitsPerValue); + } + + /** + * Get an {@link Encoder}. + * + * @param format the format used to store packed ints + * @param version the compatibility version + * @param bitsPerValue the number of bits per value + * @return an encoder + */ + public static Encoder getEncoder(Format format, int version, int bitsPerValue) { + checkVersion(version); + return BulkOperation.of(format, bitsPerValue); + } + + /** + * Expert: Restore a {@link Reader} from a stream without reading metadata at the beginning of the + * stream. This method is useful to restore data from streams which have been created using {@link + * XPackedInts#getWriterNoHeader(DataOutput, Format, int, int, int)}. 
+ * + * @param in the stream to read data from, positioned at the beginning of the packed values + * @param format the format used to serialize + * @param version the version used to serialize the data + * @param valueCount how many values the stream holds + * @param bitsPerValue the number of bits per value + * @return a Reader + * @throws IOException If there is a low-level I/O error + * @see XPackedInts#getWriterNoHeader(DataOutput, Format, int, int, int) + */ + public static Reader getReaderNoHeader(DataInput in, Format format, int version, int valueCount, int bitsPerValue) throws IOException { + checkVersion(version); + switch (format) { + case PACKED_SINGLE_BLOCK: + return XPacked64SingleBlock.create(in, valueCount, bitsPerValue); + case PACKED: + return new XPacked64(version, in, valueCount, bitsPerValue); + default: + throw new AssertionError("Unknown Writer format: " + format); + } + } + + /** + * Restore a {@link Reader} from a stream. + * + * @param in the stream to read data from + * @return a Reader + * @throws IOException If there is a low-level I/O error + */ + public static Reader getReader(DataInput in) throws IOException { + final int version = CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT); + final int bitsPerValue = in.readVInt(); + assert bitsPerValue > 0 && bitsPerValue <= 64 : "bitsPerValue=" + bitsPerValue; + final int valueCount = in.readVInt(); + final Format format = Format.byId(in.readVInt()); + + return getReaderNoHeader(in, format, version, valueCount, bitsPerValue); + } + + /** + * Expert: Restore a {@link ReaderIterator} from a stream without reading metadata at the + * beginning of the stream. This method is useful to restore data from streams which have been + * created using {@link XPackedInts#getWriterNoHeader(DataOutput, Format, int, int, int)}. 
+ * + * @param in the stream to read data from, positioned at the beginning of the packed values + * @param format the format used to serialize + * @param version the version used to serialize the data + * @param valueCount how many values the stream holds + * @param bitsPerValue the number of bits per value + * @param mem how much memory the iterator is allowed to use to read-ahead (likely to speed up + * iteration) + * @return a ReaderIterator + * @see XPackedInts#getWriterNoHeader(DataOutput, Format, int, int, int) + */ + public static ReaderIterator getReaderIteratorNoHeader( + DataInput in, + Format format, + int version, + int valueCount, + int bitsPerValue, + int mem + ) { + checkVersion(version); + return new PackedReaderIterator(format, version, valueCount, bitsPerValue, in, mem); + } + + /** + * Retrieve PackedInts as a {@link ReaderIterator} + * + * @param in positioned at the beginning of a stored packed int structure. + * @param mem how much memory the iterator is allowed to use to read-ahead (likely to speed up + * iteration) + * @return an iterator to access the values + * @throws IOException if the structure could not be retrieved. + */ + public static ReaderIterator getReaderIterator(DataInput in, int mem) throws IOException { + final int version = CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT); + final int bitsPerValue = in.readVInt(); + assert bitsPerValue > 0 && bitsPerValue <= 64 : "bitsPerValue=" + bitsPerValue; + final int valueCount = in.readVInt(); + final Format format = Format.byId(in.readVInt()); + return getReaderIteratorNoHeader(in, format, version, valueCount, bitsPerValue, mem); + } + + /** + * Expert: Construct a direct {@link Reader} from a stream without reading metadata at the + * beginning of the stream. This method is useful to restore data from streams which have been + * created using {@link XPackedInts#getWriterNoHeader(DataOutput, Format, int, int, int)}. + * + *

The returned reader will have very little memory overhead, but every call to {@link + * Reader#get(int)} is likely to perform a disk seek. + * + * @param in the stream to read data from + * @param format the format used to serialize + * @param version the version used to serialize the data + * @param valueCount how many values the stream holds + * @param bitsPerValue the number of bits per value + * @return a direct Reader + */ + public static Reader getDirectReaderNoHeader(final IndexInput in, Format format, int version, int valueCount, int bitsPerValue) { + checkVersion(version); + switch (format) { + case PACKED: + return new DirectPackedReader(bitsPerValue, valueCount, in); + case PACKED_SINGLE_BLOCK: + return new DirectPacked64SingleBlockReader(bitsPerValue, valueCount, in); + default: + throw new AssertionError("Unknown format: " + format); + } + } + + /** + * Construct a direct {@link Reader} from an {@link IndexInput}. This method is useful to restore + * data from streams which have been created using {@link XPackedInts#getWriter(DataOutput, int, + * int, float)}. + * + *

The returned reader will have very little memory overhead, but every call to {@link + * Reader#get(int)} is likely to perform a disk seek. + * + * @param in the stream to read data from + * @return a direct Reader + * @throws IOException If there is a low-level I/O error + */ + public static Reader getDirectReader(IndexInput in) throws IOException { + final int version = CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT); + final int bitsPerValue = in.readVInt(); + assert bitsPerValue > 0 && bitsPerValue <= 64 : "bitsPerValue=" + bitsPerValue; + final int valueCount = in.readVInt(); + final Format format = Format.byId(in.readVInt()); + return getDirectReaderNoHeader(in, format, version, valueCount, bitsPerValue); + } + + /** + * Create a packed integer array with the given amount of values initialized to 0. the valueCount + * and the bitsPerValue cannot be changed after creation. All Mutables known by this factory are + * kept fully in RAM. + * + *

Positive values of acceptableOverheadRatio will trade space for speed by + * selecting a faster but potentially less memory-efficient implementation. An + * acceptableOverheadRatio of {@link PackedInts#COMPACT} will make sure that the most + * memory-efficient implementation is selected whereas {@link PackedInts#FASTEST} will make sure + * that the fastest implementation is selected. + * + * @param valueCount the number of elements + * @param bitsPerValue the number of bits available for any given value + * @param acceptableOverheadRatio an acceptable overhead ratio per value + * @return a mutable packed integer array + */ + public static Mutable getMutable(int valueCount, int bitsPerValue, float acceptableOverheadRatio) { + final FormatAndBits formatAndBits = fastestFormatAndBits(valueCount, bitsPerValue, acceptableOverheadRatio); + return getMutable(valueCount, formatAndBits.bitsPerValue, formatAndBits.format); + } + + /** + * Same as {@link #getMutable(int, int, float)} with a pre-computed number of bits per value and + * format. + * + */ + public static Mutable getMutable(int valueCount, int bitsPerValue, PackedInts.Format format) { + assert valueCount >= 0; + switch (format) { + case PACKED_SINGLE_BLOCK: + return XPacked64SingleBlock.create(valueCount, bitsPerValue); + case PACKED: + return new XPacked64(valueCount, bitsPerValue); + default: + throw new AssertionError(); + } + } + + /** + * Expert: Create a packed integer array writer for the given output, format, value count, and + * number of bits per value. + * + *

The resulting stream will be long-aligned. This means that depending on the format which is + * used, up to 63 bits will be wasted. An easy way to make sure that no space is lost is to always + * use a valueCount that is a multiple of 64. + * + *

This method does not write any metadata to the stream, meaning that it is your + * responsibility to store it somewhere else in order to be able to recover data from the stream + * later on: + * + *

    + *
  • format (using {@link Format#getId()}), + *
  • valueCount, + *
  • bitsPerValue, + *
  • {@link #VERSION_CURRENT}. + *
+ * + *

It is possible to start writing values without knowing how many of them you are actually + * going to write. To do this, just pass -1 as valueCount. On the other + * hand, for any positive value of valueCount, the returned writer will make sure + * that you don't write more values than expected and pad the end of stream with zeros in case you + * have written less than valueCount when calling {@link Writer#finish()}. + * + *

The mem parameter lets you control how much memory can be used to buffer + * changes in memory before flushing to disk. High values of mem are likely to + * improve throughput. On the other hand, if speed is not that important to you, a value of + * 0 will use as little memory as possible and should already offer reasonable throughput. + * + * @param out the data output + * @param format the format to use to serialize the values + * @param valueCount the number of values + * @param bitsPerValue the number of bits per value + * @param mem how much memory (in bytes) can be used to speed up serialization + * @return a Writer + * @see XPackedInts#getReaderIteratorNoHeader(DataInput, Format, int, int, int, int) + * @see XPackedInts#getReaderNoHeader(DataInput, Format, int, int, int) + */ + public static XWriter getWriterNoHeader(DataOutput out, Format format, int valueCount, int bitsPerValue, int mem) { + return new XPackedWriter(format, out, valueCount, bitsPerValue, mem); + } + + /** + * Create a packed integer array writer for the given output, format, value count, and number of + * bits per value. + * + *

The resulting stream will be long-aligned. This means that depending on the format which is + * used under the hoods, up to 63 bits will be wasted. An easy way to make sure that no space is + * lost is to always use a valueCount that is a multiple of 64. + * + *

This method writes metadata to the stream, so that the resulting stream is sufficient to + * restore a {@link Reader} from it. You don't need to track valueCount or + * bitsPerValue by yourself. In case this is a problem, you should probably look at {@link + * #getWriterNoHeader(DataOutput, Format, int, int, int)}. + * + *

The acceptableOverheadRatio parameter controls how readers that will be + * restored from this stream trade space for speed by selecting a faster but potentially less + * memory-efficient implementation. An acceptableOverheadRatio of {@link + * PackedInts#COMPACT} will make sure that the most memory-efficient implementation is selected + * whereas {@link PackedInts#FASTEST} will make sure that the fastest implementation is selected. + * In case you are only interested in reading this stream sequentially later on, you should + * probably use {@link PackedInts#COMPACT}. + * + * @param out the data output + * @param valueCount the number of values + * @param bitsPerValue the number of bits per value + * @param acceptableOverheadRatio an acceptable overhead ratio per value + * @return a Writer + * @throws IOException If there is a low-level I/O error + */ + public static Writer getWriter(DataOutput out, int valueCount, int bitsPerValue, float acceptableOverheadRatio) throws IOException { + assert valueCount >= 0; + + final FormatAndBits formatAndBits = fastestFormatAndBits(valueCount, bitsPerValue, acceptableOverheadRatio); + final XWriter writer = getWriterNoHeader(out, formatAndBits.format, valueCount, formatAndBits.bitsPerValue, DEFAULT_BUFFER_SIZE); + writer.writeHeader(); + return writer; + } + + /** + * Returns how many bits are required to hold values up to and including maxValue NOTE: This + * method returns at least 1. + * + * @param maxValue the maximum value that should be representable. + * @return the amount of bits needed to represent values from 0 to maxValue. + */ + public static int bitsRequired(long maxValue) { + if (maxValue < 0) { + throw new IllegalArgumentException("maxValue must be non-negative (got: " + maxValue + ")"); + } + return unsignedBitsRequired(maxValue); + } + + /** + * Returns how many bits are required to store bits, interpreted as an unsigned + * value. NOTE: This method returns at least 1. 
+ * + */ + public static int unsignedBitsRequired(long bits) { + return Math.max(1, 64 - Long.numberOfLeadingZeros(bits)); + } + + /** + * Calculates the maximum unsigned long that can be expressed with the given number of bits. + * + * @param bitsPerValue the number of bits available for any given value. + * @return the maximum value for the given bits. + */ + public static long maxValue(int bitsPerValue) { + return bitsPerValue == 64 ? Long.MAX_VALUE : ~(~0L << bitsPerValue); + } + + /** + * Copy src[srcPos:srcPos+len] into dest[destPos:destPos+len] using at + * most mem bytes. + */ + public static void copy(Reader src, int srcPos, Mutable dest, int destPos, int len, int mem) { + assert srcPos + len <= src.size(); + assert destPos + len <= dest.size(); + final int capacity = mem >>> 3; + if (capacity == 0) { + for (int i = 0; i < len; ++i) { + dest.set(destPos++, src.get(srcPos++)); + } + } else if (len > 0) { + // use bulk operations + final long[] buf = new long[Math.min(capacity, len)]; + copy(src, srcPos, dest, destPos, len, buf); + } + } + + /** + * Same as {@link #copy(Reader, int, Mutable, int, int, int)} but using a pre-allocated buffer. 
+ */ + static void copy(Reader src, int srcPos, Mutable dest, int destPos, int len, long[] buf) { + assert buf.length > 0; + int remaining = 0; + while (len > 0) { + final int read = src.get(srcPos, buf, remaining, Math.min(len, buf.length - remaining)); + assert read > 0; + srcPos += read; + len -= read; + remaining += read; + final int written = dest.set(destPos, buf, 0, remaining); + assert written > 0; + destPos += written; + if (written < remaining) { + System.arraycopy(buf, written, buf, 0, remaining - written); + } + remaining -= written; + } + while (remaining > 0) { + final int written = dest.set(destPos, buf, 0, remaining); + destPos += written; + remaining -= written; + System.arraycopy(buf, written, buf, 0, remaining); + } + } + + /** + * Check that the block size is a power of 2, in the right bounds, and return its log in base 2. + */ + static int checkBlockSize(int blockSize, int minBlockSize, int maxBlockSize) { + if (blockSize < minBlockSize || blockSize > maxBlockSize) { + throw new IllegalArgumentException("blockSize must be >= " + minBlockSize + " and <= " + maxBlockSize + ", got " + blockSize); + } + if ((blockSize & (blockSize - 1)) != 0) { + throw new IllegalArgumentException("blockSize must be a power of two, got " + blockSize); + } + return Integer.numberOfTrailingZeros(blockSize); + } + + /** + * Return the number of blocks required to store size values on blockSize + * . + */ + static int numBlocks(long size, int blockSize) { + final int numBlocks = (int) (size / blockSize) + (size % blockSize == 0 ? 
0 : 1); + if ((long) numBlocks * blockSize < size) { + throw new IllegalArgumentException("size is too large for this block size"); + } + return numBlocks; + } +} diff --git a/server/src/main/java/org/opensearch/Version.java b/server/src/main/java/org/opensearch/Version.java index 88e04a6c5dd77..e8a06af50f525 100644 --- a/server/src/main/java/org/opensearch/Version.java +++ b/server/src/main/java/org/opensearch/Version.java @@ -80,7 +80,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_1_2_5 = new Version(1020599, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_3_0 = new Version(1030099, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_4_0 = new Version(1040099, org.apache.lucene.util.Version.LUCENE_8_10_1); - public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_8_10_1); + public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_9_0_0); public static final Version CURRENT = V_2_0_0; public static Version readVersion(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/opensearch/action/admin/indices/segments/IndicesSegmentResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/segments/IndicesSegmentResponse.java index ed9c086d0481c..82fe438236d0f 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/segments/IndicesSegmentResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/segments/IndicesSegmentResponse.java @@ -154,13 +154,6 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t if (segment.getSegmentSort() != null) { toXContent(builder, segment.getSegmentSort()); } - if (segment.ramTree != null) { - builder.startArray(Fields.RAM_TREE); - for (Accountable child : segment.ramTree.getChildResources()) { - toXContent(builder, child); - } - builder.endArray(); - } if 
(segment.attributes != null && segment.attributes.isEmpty() == false) { builder.field("attributes", segment.attributes); } diff --git a/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java b/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java index 43e97d3e8c437..8c3b1d20b33a0 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java @@ -224,7 +224,6 @@ static TopDocs mergeTopDocs(Collection results, int topN, int from) { if (results.isEmpty()) { return null; } - final boolean setShardIndex = false; final TopDocs topDocs = results.stream().findFirst().get(); final TopDocs mergedTopDocs; final int numShards = results.size(); @@ -234,15 +233,15 @@ static TopDocs mergeTopDocs(Collection results, int topN, int from) { CollapseTopFieldDocs firstTopDocs = (CollapseTopFieldDocs) topDocs; final Sort sort = new Sort(firstTopDocs.fields); final CollapseTopFieldDocs[] shardTopDocs = results.toArray(new CollapseTopFieldDocs[numShards]); - mergedTopDocs = CollapseTopFieldDocs.merge(sort, from, topN, shardTopDocs, setShardIndex); + mergedTopDocs = CollapseTopFieldDocs.merge(sort, from, topN, shardTopDocs, false); } else if (topDocs instanceof TopFieldDocs) { TopFieldDocs firstTopDocs = (TopFieldDocs) topDocs; final Sort sort = new Sort(firstTopDocs.fields); final TopFieldDocs[] shardTopDocs = results.toArray(new TopFieldDocs[numShards]); - mergedTopDocs = TopDocs.merge(sort, from, topN, shardTopDocs, setShardIndex); + mergedTopDocs = TopDocs.merge(sort, from, topN, shardTopDocs); } else { final TopDocs[] shardTopDocs = results.toArray(new TopDocs[numShards]); - mergedTopDocs = TopDocs.merge(from, topN, shardTopDocs, setShardIndex); + mergedTopDocs = TopDocs.merge(from, topN, shardTopDocs); } return mergedTopDocs; } diff --git a/server/src/main/java/org/opensearch/action/search/TransportSearchHelper.java 
b/server/src/main/java/org/opensearch/action/search/TransportSearchHelper.java index 76770245a3dbe..7ddfdfec34cb1 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportSearchHelper.java +++ b/server/src/main/java/org/opensearch/action/search/TransportSearchHelper.java @@ -32,10 +32,11 @@ package org.opensearch.action.search; -import org.apache.lucene.store.RAMOutputStream; import org.opensearch.LegacyESVersion; import org.opensearch.Version; +import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamInput; +import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.util.concurrent.AtomicArray; import org.opensearch.search.SearchPhaseResult; import org.opensearch.search.SearchShardTarget; @@ -57,7 +58,8 @@ static InternalScrollSearchRequest internalScrollSearchRequest(ShardSearchContex static String buildScrollId(AtomicArray searchPhaseResults, Version version) { boolean includeContextUUID = version.onOrAfter(LegacyESVersion.V_7_7_0); - try (RAMOutputStream out = new RAMOutputStream()) { + try { + BytesStreamOutput out = new BytesStreamOutput(); if (includeContextUUID) { out.writeString(INCLUDE_CONTEXT_UUID); } @@ -77,8 +79,7 @@ static String buildScrollId(AtomicArray searchPhase out.writeString(searchShardTarget.getNodeId()); } } - byte[] bytes = new byte[(int) out.getFilePointer()]; - out.writeTo(bytes, 0); + byte[] bytes = BytesReference.toBytes(out.bytes()); return Base64.getUrlEncoder().encodeToString(bytes); } catch (IOException e) { throw new UncheckedIOException(e); diff --git a/server/src/main/java/org/opensearch/common/bytes/BytesArray.java b/server/src/main/java/org/opensearch/common/bytes/BytesArray.java index 832e8a3e54f16..69f715856c696 100644 --- a/server/src/main/java/org/opensearch/common/bytes/BytesArray.java +++ b/server/src/main/java/org/opensearch/common/bytes/BytesArray.java @@ -33,11 +33,11 @@ package org.opensearch.common.bytes; import 
org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.FutureArrays; import org.opensearch.common.io.stream.StreamInput; import java.io.IOException; import java.io.OutputStream; +import java.util.Arrays; public final class BytesArray extends AbstractBytesReference { @@ -96,7 +96,7 @@ public boolean equals(Object other) { } if (other instanceof BytesArray) { final BytesArray that = (BytesArray) other; - return FutureArrays.equals(bytes, offset, offset + length, that.bytes, that.offset, that.offset + that.length); + return Arrays.equals(bytes, offset, offset + length, that.bytes, that.offset, that.offset + that.length); } return super.equals(other); } diff --git a/server/src/main/java/org/opensearch/common/bytes/CompositeBytesReference.java b/server/src/main/java/org/opensearch/common/bytes/CompositeBytesReference.java index 2656a1225a07a..2a989e33e918f 100644 --- a/server/src/main/java/org/opensearch/common/bytes/CompositeBytesReference.java +++ b/server/src/main/java/org/opensearch/common/bytes/CompositeBytesReference.java @@ -35,7 +35,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefIterator; -import org.apache.lucene.util.FutureObjects; import org.apache.lucene.util.RamUsageEstimator; import java.io.IOException; @@ -100,7 +99,7 @@ public byte get(int index) { @Override public int indexOf(byte marker, int from) { final int remainingBytes = Math.max(length - from, 0); - FutureObjects.checkFromIndexSize(from, remainingBytes, length); + Objects.checkFromIndexSize(from, remainingBytes, length); int result = -1; if (length == 0) { @@ -132,7 +131,7 @@ public int length() { @Override public BytesReference slice(int from, int length) { - FutureObjects.checkFromIndexSize(from, length, this.length); + Objects.checkFromIndexSize(from, length, this.length); if (length == 0) { return BytesArray.EMPTY; diff --git a/server/src/main/java/org/opensearch/common/geo/GeoUtils.java 
b/server/src/main/java/org/opensearch/common/geo/GeoUtils.java index 92010a5d7f57e..1585e6cf2ad60 100644 --- a/server/src/main/java/org/opensearch/common/geo/GeoUtils.java +++ b/server/src/main/java/org/opensearch/common/geo/GeoUtils.java @@ -625,8 +625,8 @@ public static double arcDistance(double lat1, double lon1, double lat2, double l * 4 decimal degrees */ public static double planeDistance(double lat1, double lon1, double lat2, double lon2) { - double x = (lon2 - lon1) * SloppyMath.TO_RADIANS * Math.cos((lat2 + lat1) / 2.0 * SloppyMath.TO_RADIANS); - double y = (lat2 - lat1) * SloppyMath.TO_RADIANS; + double x = Math.toRadians(lon2 - lon1) * Math.cos(Math.toRadians((lat2 + lat1) / 2.0d)); + double y = Math.toRadians(lat2 - lat1); return Math.sqrt(x * x + y * y) * EARTH_MEAN_RADIUS; } diff --git a/server/src/main/java/org/opensearch/common/lucene/Lucene.java b/server/src/main/java/org/opensearch/common/lucene/Lucene.java index bdfed94a94299..6e17aab92f24b 100644 --- a/server/src/main/java/org/opensearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/opensearch/common/lucene/Lucene.java @@ -69,6 +69,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Terms; +import org.apache.lucene.index.VectorValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.FieldDoc; @@ -119,7 +120,7 @@ import java.util.Map; public class Lucene { - public static final String LATEST_CODEC = "Lucene87"; + public static final String LATEST_CODEC = "Lucene90"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; @@ -217,7 +218,7 @@ public static SegmentInfos pruneUnreferencedFiles(String segmentsFileName, Direc * since checksums don's match anymore. that's why we prune the name here directly. * We also want the caller to know if we were not able to remove a segments_N file. 
*/ - if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) { + if (file.startsWith(IndexFileNames.SEGMENTS)) { foundSegmentFiles++; if (file.equals(si.getSegmentsFileName()) == false) { directory.deleteFile(file); // remove all segment_N files except of the one we wanna keep @@ -260,7 +261,7 @@ public static IndexCommit getIndexCommit(SegmentInfos si, Directory directory) t public static void cleanLuceneIndex(Directory directory) throws IOException { try (Lock writeLock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) { for (final String file : directory.listAll()) { - if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) { + if (file.startsWith(IndexFileNames.SEGMENTS)) { directory.deleteFile(file); // remove all segment_N files } } @@ -1110,6 +1111,16 @@ public CacheHelper getCoreCacheHelper() { public CacheHelper getReaderCacheHelper() { return null; } + + @Override + public VectorValues getVectorValues(String field) throws IOException { + return null; + } + + @Override + public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException { + return null; + } }; } } diff --git a/server/src/main/java/org/opensearch/common/lucene/MinimumScoreCollector.java b/server/src/main/java/org/opensearch/common/lucene/MinimumScoreCollector.java index a00e4efe21fbb..81c98c862d2b2 100644 --- a/server/src/main/java/org/opensearch/common/lucene/MinimumScoreCollector.java +++ b/server/src/main/java/org/opensearch/common/lucene/MinimumScoreCollector.java @@ -58,7 +58,7 @@ public MinimumScoreCollector(Collector collector, float minimumScore) { @Override public void setScorer(Scorable scorer) throws IOException { if (!(scorer instanceof ScoreCachingWrappingScorer)) { - scorer = new ScoreCachingWrappingScorer(scorer); + scorer = ScoreCachingWrappingScorer.wrap(scorer); } this.scorer = scorer; leafCollector.setScorer(scorer); diff --git 
a/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java index 5d39b0440231b..bc83f07f74103 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java @@ -43,6 +43,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; @@ -377,4 +378,9 @@ public float getBoostTermsFactor() { public void setBoostTermsFactor(float boostTermsFactor) { this.boostTermsFactor = boostTermsFactor; } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java index 14c66df36b79e..711ff9860a5ce 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -43,6 +43,7 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.StringHelper; @@ -320,4 +321,9 @@ private boolean termArraysEquals(List termArrays1, List termArra public String getField() { return field; } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } } diff --git 
a/server/src/main/java/org/opensearch/common/lucene/search/Queries.java b/server/src/main/java/org/opensearch/common/lucene/search/Queries.java index d119611e02c6a..ef10d1eb0d221 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/Queries.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/Queries.java @@ -44,7 +44,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchException; -import org.opensearch.Version; import org.opensearch.common.Nullable; import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.mapper.TypeFieldMapper; @@ -84,9 +83,8 @@ public static Query newNestedFilter() { /** * Creates a new non-nested docs query - * @param indexVersionCreated the index version created since newer indices can identify a parent field more efficiently */ - public static Query newNonNestedFilter(Version indexVersionCreated) { + public static Query newNonNestedFilter() { return new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME); } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java b/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java index 8ac8eb3c41ae2..4b770529af4a8 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java @@ -39,13 +39,13 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanQuery; import 
org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java index f7b91db2e712f..09239b0108422 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java @@ -34,7 +34,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.FilterScorer; @@ -59,7 +58,6 @@ import java.util.List; import java.util.Locale; import java.util.Objects; -import java.util.Set; /** * A query that allows for a pluggable boost function / filter. 
If it matches @@ -370,11 +368,6 @@ class CustomBoostFactorWeight extends Weight { this.needsScores = needsScores; } - @Override - public void extractTerms(Set terms) { - subQueryWeight.extractTerms(terms); - } - private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { Scorer subQueryScorer = subQueryWeight.scorer(context); if (subQueryScorer == null) { diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java index 44c76e74d5a41..846cfd4b6431e 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java @@ -34,7 +34,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; @@ -57,7 +56,6 @@ import java.io.IOException; import java.util.Objects; -import java.util.Set; /** * A query that uses a script to compute documents' scores. 
@@ -136,11 +134,6 @@ public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { } } - @Override - public void extractTerms(Set terms) { - subQueryWeight.extractTerms(terms); - } - @Override public Scorer scorer(LeafReaderContext context) throws IOException { Scorer subQueryScorer = subQueryWeight.scorer(context); diff --git a/server/src/main/java/org/opensearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/opensearch/common/settings/KeyStoreWrapper.java index 1980584982579..900eda6975526 100644 --- a/server/src/main/java/org/opensearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/opensearch/common/settings/KeyStoreWrapper.java @@ -32,13 +32,12 @@ package org.opensearch.common.settings; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; -import org.apache.lucene.store.BufferedChecksumIndexInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.IOContext; -import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.NIOFSDirectory; import org.apache.lucene.util.SetOnce; @@ -238,8 +237,7 @@ public static KeyStoreWrapper load(Path configDir, String keystoreFileName) thro } NIOFSDirectory directory = new NIOFSDirectory(configDir); - try (IndexInput indexInput = directory.openInput(keystoreFileName, IOContext.READONCE)) { - ChecksumIndexInput input = new BufferedChecksumIndexInput(indexInput); + try (ChecksumIndexInput input = EndiannessReverserUtil.openChecksumInput(directory, keystoreFileName, IOContext.READONCE)) { final int formatVersion; try { formatVersion = CodecUtil.checkHeader(input, keystoreFileName, MIN_FORMAT_VERSION, FORMAT_VERSION); @@ -521,7 +519,7 @@ public synchronized void save(Path configDir, char[] password) throws Exception 
NIOFSDirectory directory = new NIOFSDirectory(configDir); // write to tmp file first, then overwrite String tmpFile = KEYSTORE_FILENAME + ".tmp"; - try (IndexOutput output = directory.createOutput(tmpFile, IOContext.DEFAULT)) { + try (IndexOutput output = EndiannessReverserUtil.createOutput(directory, tmpFile, IOContext.DEFAULT)) { CodecUtil.writeHeader(output, KEYSTORE_FILENAME, FORMAT_VERSION); output.writeByte(password.length == 0 ? (byte) 0 : (byte) 1); diff --git a/server/src/main/java/org/opensearch/common/util/CuckooFilter.java b/server/src/main/java/org/opensearch/common/util/CuckooFilter.java index 4c4b7ac5d9011..e23b21936dfe3 100644 --- a/server/src/main/java/org/opensearch/common/util/CuckooFilter.java +++ b/server/src/main/java/org/opensearch/common/util/CuckooFilter.java @@ -34,6 +34,7 @@ import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; import org.apache.lucene.util.packed.PackedInts; +import org.apache.lucene.util.packed.XPackedInts; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; @@ -80,7 +81,7 @@ public class CuckooFilter implements Writeable { private static final int MAX_EVICTIONS = 500; static final int EMPTY = 0; - private final PackedInts.Mutable data; + private final XPackedInts.Mutable data; private final int numBuckets; private final int bitsPerEntry; private final int fingerprintMask; @@ -107,7 +108,7 @@ public class CuckooFilter implements Writeable { "Attempted to create [" + numBuckets * entriesPerBucket + "] entries which is > Integer.MAX_VALUE" ); } - this.data = PackedInts.getMutable(numBuckets * entriesPerBucket, bitsPerEntry, PackedInts.COMPACT); + this.data = XPackedInts.getMutable(numBuckets * entriesPerBucket, bitsPerEntry, PackedInts.COMPACT); // puts the bits at the right side of the mask, e.g. 
`0000000000001111` for bitsPerEntry = 4 this.fingerprintMask = (0x80000000 >> (bitsPerEntry - 1)) >>> (Integer.SIZE - bitsPerEntry); @@ -132,7 +133,7 @@ public class CuckooFilter implements Writeable { ); } // TODO this is probably super slow, but just used for testing atm - this.data = PackedInts.getMutable(numBuckets * entriesPerBucket, bitsPerEntry, PackedInts.COMPACT); + this.data = XPackedInts.getMutable(numBuckets * entriesPerBucket, bitsPerEntry, PackedInts.COMPACT); for (int i = 0; i < other.data.size(); i++) { data.set(i, other.data.get(i)); } @@ -148,7 +149,7 @@ public class CuckooFilter implements Writeable { this.fingerprintMask = (0x80000000 >> (bitsPerEntry - 1)) >>> (Integer.SIZE - bitsPerEntry); - data = (PackedInts.Mutable) PackedInts.getReader(new DataInput() { + data = (XPackedInts.Mutable) XPackedInts.getReader(new DataInput() { @Override public byte readByte() throws IOException { return in.readByte(); @@ -158,6 +159,11 @@ public byte readByte() throws IOException { public void readBytes(byte[] b, int offset, int len) throws IOException { in.readBytes(b, offset, len); } + + @Override + public void skipBytes(long numBytes) throws IOException { + in.skip(numBytes); + } }); } diff --git a/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java b/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java index 53b297e0f99fb..fd1dee46815a8 100644 --- a/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java +++ b/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java @@ -34,6 +34,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; @@ -117,7 +118,7 @@ private void 
writeStateToFirstLocation(final T state, Path stateLocation, Direct throws WriteStateException { try { deleteFileIfExists(stateLocation, stateDir, tmpFileName); - try (IndexOutput out = stateDir.createOutput(tmpFileName, IOContext.DEFAULT)) { + try (IndexOutput out = EndiannessReverserUtil.createOutput(stateDir, tmpFileName, IOContext.DEFAULT)) { CodecUtil.writeHeader(out, STATE_FILE_CODEC, STATE_FILE_VERSION); out.writeInt(FORMAT.index()); try (XContentBuilder builder = newXContentBuilder(FORMAT, new IndexOutputOutputStream(out) { @@ -306,7 +307,7 @@ protected XContentBuilder newXContentBuilder(XContentType type, OutputStream str */ public final T read(NamedXContentRegistry namedXContentRegistry, Path file) throws IOException { try (Directory dir = newDirectory(file.getParent())) { - try (IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) { + try (IndexInput indexInput = EndiannessReverserUtil.openInput(dir, file.getFileName().toString(), IOContext.DEFAULT)) { // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here. 
CodecUtil.checksumEntireFile(indexInput); CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION, STATE_FILE_VERSION); diff --git a/server/src/main/java/org/opensearch/index/cache/bitset/BitsetFilterCache.java b/server/src/main/java/org/opensearch/index/cache/bitset/BitsetFilterCache.java index eb14b902d1aef..0bc77f0f0079b 100644 --- a/server/src/main/java/org/opensearch/index/cache/bitset/BitsetFilterCache.java +++ b/server/src/main/java/org/opensearch/index/cache/bitset/BitsetFilterCache.java @@ -33,6 +33,7 @@ package org.opensearch.index.cache.bitset; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; @@ -147,7 +148,7 @@ public void clear(String reason) { } private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context) throws ExecutionException { - final IndexReader.CacheHelper cacheHelper = context.reader().getCoreCacheHelper(); + final IndexReader.CacheHelper cacheHelper = FilterLeafReader.unwrap(context.reader()).getCoreCacheHelper(); if (cacheHelper == null) { throw new IllegalArgumentException("Reader " + context.reader() + " does not support caching"); } @@ -273,7 +274,7 @@ public IndexWarmer.TerminationHandle warmReader(final IndexShard indexShard, fin } if (hasNested) { - warmUp.add(Queries.newNonNestedFilter(indexSettings.getIndexVersionCreated())); + warmUp.add(Queries.newNonNestedFilter()); } final CountDownLatch latch = new CountDownLatch(reader.leaves().size() * warmUp.size()); diff --git a/server/src/main/java/org/opensearch/index/codec/CodecService.java b/server/src/main/java/org/opensearch/index/codec/CodecService.java index 136810c8cc2e2..d22c7239922bc 100644 --- a/server/src/main/java/org/opensearch/index/codec/CodecService.java +++ 
b/server/src/main/java/org/opensearch/index/codec/CodecService.java @@ -34,8 +34,8 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene87.Lucene87Codec; -import org.apache.lucene.codecs.lucene87.Lucene87Codec.Mode; +import org.apache.lucene.codecs.lucene90.Lucene90Codec; +import org.apache.lucene.codecs.lucene90.Lucene90Codec.Mode; import org.opensearch.common.Nullable; import org.opensearch.common.collect.MapBuilder; import org.opensearch.index.mapper.MapperService; @@ -60,8 +60,8 @@ public class CodecService { public CodecService(@Nullable MapperService mapperService, Logger logger) { final MapBuilder codecs = MapBuilder.newMapBuilder(); if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene87Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene87Codec(Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene90Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene90Codec(Mode.BEST_COMPRESSION)); } else { codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_COMPRESSION, mapperService, logger)); diff --git a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java index 06f2216a28812..20a8ff7ca9170 100644 --- a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -36,8 +36,8 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat; -import org.apache.lucene.codecs.lucene87.Lucene87Codec; +import org.apache.lucene.codecs.lucene90.Lucene90Codec; 
+import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.opensearch.common.lucene.Lucene; import org.opensearch.index.mapper.CompletionFieldMapper; import org.opensearch.index.mapper.MappedFieldType; @@ -51,10 +51,10 @@ * per index in real time via the mapping API. If no specific postings format is * configured for a specific field the default postings format is used. */ -public class PerFieldMappingPostingFormatCodec extends Lucene87Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene90Codec { private final Logger logger; private final MapperService mapperService; - private final DocValuesFormat dvFormat = new Lucene80DocValuesFormat(Lucene80DocValuesFormat.Mode.BEST_COMPRESSION); + private final DocValuesFormat dvFormat = new Lucene90DocValuesFormat(); static { assert Codec.forName(Lucene.LATEST_CODEC).getClass().isAssignableFrom(PerFieldMappingPostingFormatCodec.class) diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index 825d71d6d1024..bba1d8c069c68 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -51,7 +51,6 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; -import org.apache.lucene.util.Accountables; import org.apache.lucene.util.SetOnce; import org.opensearch.ExceptionsHelper; import org.opensearch.action.index.IndexRequest; @@ -997,9 +996,6 @@ private void fillSegmentInfo(SegmentReader segmentReader, boolean verbose, boole logger.trace(() -> new ParameterizedMessage("failed to get size for [{}]", info.info.name), e); } segment.segmentSort = info.info.getIndexSort(); - if (verbose) { - segment.ramTree = Accountables.namedAccountable("root", segmentReader); - } segment.attributes = info.info.getAttributes(); // TODO: add more fine grained mem stats 
values to per segment info here segments.put(info.info.name, segment); diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 1c5f06e85cb88..84090047d68e8 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -53,7 +53,6 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ReferenceManager; @@ -77,6 +76,7 @@ import org.opensearch.common.lucene.LoggerInfoStream; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; +import org.opensearch.common.lucene.search.Queries; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.lucene.uid.VersionsAndSeqNoResolver; import org.opensearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndSeqNo; @@ -2978,7 +2978,7 @@ private void restoreVersionMapAndCheckpointTracker(DirectoryReader directoryRead BooleanClause.Occur.MUST ) // exclude non-root nested documents - .add(new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME), BooleanClause.Occur.MUST) + .add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST) .build(); final Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1.0f); for (LeafReaderContext leaf : directoryReader.leaves()) { diff --git a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java index ae1dc9e647073..98da554a5a1b0 100644 --- 
a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java @@ -47,7 +47,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.util.ArrayUtil; -import org.opensearch.Version; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.search.Queries; @@ -247,7 +246,7 @@ private void fillParallelArray(ScoreDoc[] scoreDocs, ParallelArray parallelArray private static Query operationsRangeQuery(long fromSeqNo, long toSeqNo) { return new BooleanQuery.Builder().add(LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, fromSeqNo, toSeqNo), BooleanClause.Occur.MUST) - .add(Queries.newNonNestedFilter(Version.CURRENT), BooleanClause.Occur.MUST) // exclude non-root nested docs + .add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST) // exclude non-root nested docs .build(); } diff --git a/server/src/main/java/org/opensearch/index/engine/PrunePostingsMergePolicy.java b/server/src/main/java/org/opensearch/index/engine/PrunePostingsMergePolicy.java index aee87dd7e1a42..18f2799edc244 100644 --- a/server/src/main/java/org/opensearch/index/engine/PrunePostingsMergePolicy.java +++ b/server/src/main/java/org/opensearch/index/engine/PrunePostingsMergePolicy.java @@ -151,11 +151,6 @@ public ImpactsEnum impacts(int flags) throws IOException { public int size() { return postingsReader.size(); } - - @Override - public long ramBytesUsed() { - return postingsReader.ramBytesUsed(); - } }; } diff --git a/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java b/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java index b1d08d2605640..0a885f23629e6 100644 --- a/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java +++ 
b/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java @@ -46,7 +46,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.StoredFieldVisitor; -import org.apache.lucene.search.ConjunctionDISI; +import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; @@ -119,7 +119,7 @@ public NumericDocValues getNumeric(FieldInfo field) throws IOException { // we can't return null here lucenes DocIdMerger expects an instance intersection = DocIdSetIterator.empty(); } else { - intersection = ConjunctionDISI.intersectIterators( + intersection = ConjunctionUtils.intersectIterators( Arrays.asList(numeric, new BitSetIterator(recoverySourceToKeep, recoverySourceToKeep.length())) ); } @@ -202,11 +202,6 @@ public void checkIntegrity() throws IOException { public void close() throws IOException { in.close(); } - - @Override - public long ramBytesUsed() { - return in.ramBytesUsed(); - } } private abstract static class FilterStoredFieldsReader extends StoredFieldsReader { @@ -217,11 +212,6 @@ private abstract static class FilterStoredFieldsReader extends StoredFieldsReade this.in = fieldsReader; } - @Override - public long ramBytesUsed() { - return in.ramBytesUsed(); - } - @Override public void close() throws IOException { in.close(); @@ -294,7 +284,7 @@ public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException { } @Override - public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException { + public void stringField(FieldInfo fieldInfo, String value) throws IOException { visitor.stringField(fieldInfo, value); } diff --git a/server/src/main/java/org/opensearch/index/engine/Segment.java b/server/src/main/java/org/opensearch/index/engine/Segment.java index 2b824c847f75f..4874d0a30196f 100644 --- 
a/server/src/main/java/org/opensearch/index/engine/Segment.java +++ b/server/src/main/java/org/opensearch/index/engine/Segment.java @@ -39,7 +39,6 @@ import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedNumericSelector; import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.Accountables; import org.opensearch.Version; import org.opensearch.common.Nullable; import org.opensearch.common.io.stream.StreamInput; @@ -49,9 +48,7 @@ import org.opensearch.common.unit.ByteSizeValue; import java.io.IOException; -import java.util.ArrayList; import java.util.Collection; -import java.util.List; import java.util.Map; import java.util.Objects; @@ -68,7 +65,6 @@ public class Segment implements Writeable { public Boolean compound = null; public String mergeId; public Sort segmentSort; - public Accountable ramTree = null; public Map attributes; private static final ByteSizeValue ZERO_BYTE_SIZE_VALUE = new ByteSizeValue(0L); @@ -91,7 +87,7 @@ public Segment(StreamInput in) throws IOException { } if (in.readBoolean()) { // verbose mode - ramTree = readRamTree(in); + readRamTree(in); } segmentSort = readSegmentSort(in); if (in.readBoolean()) { @@ -207,12 +203,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().before(Version.V_2_0_0)) { out.writeLong(0L); } - - boolean verbose = ramTree != null; - out.writeBoolean(verbose); - if (verbose) { - writeRamTree(out, ramTree); - } + out.writeBoolean(false); writeSegmentSort(out, segmentSort); boolean hasAttributes = attributes != null; out.writeBoolean(hasAttributes); @@ -312,18 +303,13 @@ private void writeSegmentSort(StreamOutput out, Sort sort) throws IOException { } } - private Accountable readRamTree(StreamInput in) throws IOException { - final String name = in.readString(); - final long bytes = in.readVLong(); + private static void readRamTree(StreamInput in) throws IOException { + in.readString(); + in.readVLong(); int numChildren = in.readVInt(); - 
if (numChildren == 0) { - return Accountables.namedAccountable(name, bytes); - } - List children = new ArrayList<>(numChildren); - while (numChildren-- > 0) { - children.add(readRamTree(in)); + for (int i = 0; i < numChildren; i++) { + readRamTree(in); } - return Accountables.namedAccountable(name, children, bytes); } // the ram tree is written recursively since the depth is fairly low (5 or 6) diff --git a/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java b/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java index ddf40e55be4b3..07fe3f9230de4 100644 --- a/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java +++ b/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java @@ -46,6 +46,9 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Terms; +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.index.VectorValues; +import org.apache.lucene.search.TopDocs; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.opensearch.common.util.set.Sets; @@ -56,7 +59,6 @@ import org.opensearch.index.translog.Translog; import java.io.IOException; -import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.Set; @@ -79,6 +81,8 @@ public final class TranslogLeafReader extends LeafReader { 0, 0, 0, + 0, + VectorSimilarityFunction.EUCLIDEAN, false ); private static final FieldInfo FAKE_ROUTING_FIELD = new FieldInfo( @@ -94,6 +98,8 @@ public final class TranslogLeafReader extends LeafReader { 0, 0, 0, + 0, + VectorSimilarityFunction.EUCLIDEAN, false ); private static final FieldInfo FAKE_ID_FIELD = new FieldInfo( @@ -109,6 +115,8 @@ public final class TranslogLeafReader extends LeafReader { 0, 0, 0, + 0, + VectorSimilarityFunction.EUCLIDEAN, false ); public static Set ALL_FIELD_NAMES = Sets.newHashSet(FAKE_SOURCE_FIELD.name, 
FAKE_ROUTING_FIELD.name, FAKE_ID_FIELD.name); @@ -208,7 +216,7 @@ public void document(int docID, StoredFieldVisitor visitor) throws IOException { visitor.binaryField(FAKE_SOURCE_FIELD, operation.source().toBytesRef().bytes); } if (operation.routing() != null && visitor.needsField(FAKE_ROUTING_FIELD) == StoredFieldVisitor.Status.YES) { - visitor.stringField(FAKE_ROUTING_FIELD, operation.routing().getBytes(StandardCharsets.UTF_8)); + visitor.stringField(FAKE_ROUTING_FIELD, operation.routing()); } if (visitor.needsField(FAKE_ID_FIELD) == StoredFieldVisitor.Status.YES) { BytesRef bytesRef = Uid.encodeId(operation.id()); @@ -227,4 +235,14 @@ protected void doClose() { public CacheHelper getReaderCacheHelper() { throw new UnsupportedOperationException(); } + + @Override + public VectorValues getVectorValues(String field) throws IOException { + return getVectorValues(field); + } + + @Override + public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException { + throw new UnsupportedOperationException(); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/IndexNumericFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/IndexNumericFieldData.java index 578657bea8818..dd987e9f79546 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/IndexNumericFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/IndexNumericFieldData.java @@ -129,6 +129,8 @@ public final SortField sortField( : SortedNumericSelector.Type.MIN; SortField sortField = new SortedNumericSortField(getFieldName(), getNumericType().sortFieldType, reverse, selectorType); sortField.setMissingValue(source.missingObject(missingValue, reverse)); + // todo: remove since deprecated + sortField.setOptimizeSortWithPoints(false); return sortField; } diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java 
b/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java index f680ceffc517d..e661b61548cd0 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java @@ -31,10 +31,6 @@ package org.opensearch.index.fielddata.plain; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.lucene.codecs.blocktree.FieldReader; -import org.apache.lucene.codecs.blocktree.Stats; import org.apache.lucene.index.FilteredTermsEnum; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; @@ -69,7 +65,6 @@ import java.io.IOException; public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { - private static final Logger logger = LogManager.getLogger(PagedBytesIndexFieldData.class); private final double minFrequency, maxFrequency; private final int minSegmentSize; @@ -226,36 +221,6 @@ public long bytesPerValue(BytesRef term) { return bytes; } - /** - * @return the estimate for loading the entire term set into field data, or 0 if unavailable - */ - public long estimateStringFieldData() { - try { - LeafReader reader = context.reader(); - Terms terms = reader.terms(getFieldName()); - - final Terms fieldTerms = reader.terms(getFieldName()); - - if (fieldTerms instanceof FieldReader) { - final Stats stats = ((FieldReader) fieldTerms).getStats(); - long totalTermBytes = stats.totalTermBytes; - if (logger.isTraceEnabled()) { - logger.trace( - "totalTermBytes: {}, terms.size(): {}, terms.getSumDocFreq(): {}", - totalTermBytes, - terms.size(), - terms.getSumDocFreq() - ); - } - long totalBytes = totalTermBytes + (2 * terms.size()) + (4 * terms.getSumDocFreq()); - return totalBytes; - } - } catch (Exception e) { - logger.warn("Unable to estimate memory overhead", e); - } - return 0; - } - /** * Determine whether the 
BlockTreeTermsReader.FieldReader can be used * for estimating the field data, adding the estimate to the circuit @@ -271,25 +236,7 @@ public TermsEnum beforeLoad(Terms terms) throws IOException { TermsEnum iterator = terms.iterator(); TermsEnum filteredIterator = filter(terms, iterator, reader); - final boolean filtered = iterator != filteredIterator; - iterator = filteredIterator; - - if (filtered) { - if (logger.isTraceEnabled()) { - logger.trace("Filter exists, can't circuit break normally, using RamAccountingTermsEnum"); - } - return new RamAccountingTermsEnum(iterator, breaker, this, this.fieldName); - } else { - estimatedBytes = this.estimateStringFieldData(); - // If we weren't able to estimate, wrap in the RamAccountingTermsEnum - if (estimatedBytes == 0) { - iterator = new RamAccountingTermsEnum(iterator, breaker, this, this.fieldName); - } else { - breaker.addEstimateBytesAndMaybeBreak(estimatedBytes, fieldName); - } - - return iterator; - } + return new RamAccountingTermsEnum(filteredIterator, breaker, this, this.fieldName); } private TermsEnum filter(Terms terms, TermsEnum iterator, LeafReader reader) throws IOException { diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java index b050e0d3dfa9f..714b762f81fb7 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java @@ -32,7 +32,6 @@ package org.opensearch.index.fielddata.plain; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.LeafReader; @@ -53,6 +52,7 @@ import org.opensearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; import org.opensearch.index.mapper.DocValueFetcher; import 
org.opensearch.indices.breaker.CircuitBreakerService; +import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; import org.opensearch.search.aggregations.support.ValuesSourceType; diff --git a/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java b/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java index a51137b4a4f69..eb094ce1df9a4 100644 --- a/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java +++ b/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java @@ -39,18 +39,17 @@ import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.IgnoredFieldMapper; import org.opensearch.index.mapper.MappedFieldType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.RoutingFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; import org.opensearch.index.mapper.Uid; -import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableSet; @@ -96,9 +95,9 @@ public Status needsField(FieldInfo fieldInfo) { return requiredFields.isEmpty() ? 
Status.STOP : Status.NO; } - public void postProcess(MapperService mapperService) { + public final void postProcess(Function fieldTypeLookup) { for (Map.Entry> entry : fields().entrySet()) { - MappedFieldType fieldType = mapperService.fieldType(entry.getKey()); + MappedFieldType fieldType = fieldTypeLookup.apply(entry.getKey()); if (fieldType == null) { throw new IllegalStateException("Field [" + entry.getKey() + "] exists in the index but not in mappings"); } @@ -125,10 +124,9 @@ public void binaryField(FieldInfo fieldInfo, BytesRef value) { } @Override - public void stringField(FieldInfo fieldInfo, byte[] bytes) { + public void stringField(FieldInfo fieldInfo, String value) { assert IdFieldMapper.NAME.equals(fieldInfo.name) == false : "_id field must go through binaryField"; assert sourceFieldName.equals(fieldInfo.name) == false : "source field must go through binaryField"; - final String value = new String(bytes, StandardCharsets.UTF_8); addValue(fieldInfo.name, value); } diff --git a/server/src/main/java/org/opensearch/index/fieldvisitor/SingleFieldsVisitor.java b/server/src/main/java/org/opensearch/index/fieldvisitor/SingleFieldsVisitor.java index a3aacd823fbb8..3214ec2780701 100644 --- a/server/src/main/java/org/opensearch/index/fieldvisitor/SingleFieldsVisitor.java +++ b/server/src/main/java/org/opensearch/index/fieldvisitor/SingleFieldsVisitor.java @@ -38,7 +38,6 @@ import org.opensearch.index.mapper.Uid; import org.apache.lucene.util.BytesRef; -import java.nio.charset.StandardCharsets; import java.util.List; /** @@ -84,8 +83,8 @@ public void binaryField(FieldInfo fieldInfo, byte[] value) { } @Override - public void stringField(FieldInfo fieldInfo, byte[] bytes) { - addValue(new String(bytes, StandardCharsets.UTF_8)); + public void stringField(FieldInfo fieldInfo, String value) { + addValue(value); } @Override diff --git a/server/src/main/java/org/opensearch/index/get/ShardGetService.java b/server/src/main/java/org/opensearch/index/get/ShardGetService.java 
index a877b0085816a..e63d80336bc7a 100644 --- a/server/src/main/java/org/opensearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/opensearch/index/get/ShardGetService.java @@ -39,6 +39,7 @@ import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Term; +import org.apache.lucene.index.VectorSimilarityFunction; import org.opensearch.OpenSearchException; import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; @@ -324,6 +325,8 @@ private GetResult innerGetLoadFromStoredFields( 0, 0, 0, + 0, + VectorSimilarityFunction.EUCLIDEAN, false ); StoredFieldVisitor.Status status = fieldVisitor.needsField(fieldInfo); @@ -347,7 +350,7 @@ private GetResult innerGetLoadFromStoredFields( // put stored fields into result objects if (!fieldVisitor.fields().isEmpty()) { - fieldVisitor.postProcess(mapperService); + fieldVisitor.postProcess(mapperService::fieldType); documentFields = new HashMap<>(); metadataFields = new HashMap<>(); for (Map.Entry> entry : fieldVisitor.fields().entrySet()) { diff --git a/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java index 5d0e2349aca6a..2d0f59fc76ea4 100644 --- a/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; -import org.apache.lucene.search.suggest.document.Completion84PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion90PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; import org.apache.lucene.search.suggest.document.CompletionQuery; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; @@ -311,7 
+311,7 @@ public ContextMappings getContextMappings() { */ public static synchronized PostingsFormat postingsFormat() { if (postingsFormat == null) { - postingsFormat = new Completion84PostingsFormat(); + postingsFormat = new Completion90PostingsFormat(); } return postingsFormat; } diff --git a/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java index 9aa7b019a8e61..e2239069603e6 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java @@ -37,9 +37,9 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.PointValues; +import org.apache.lucene.sandbox.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; -import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.Query; import org.opensearch.OpenSearchParseException; import org.opensearch.Version; diff --git a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java index 575cfc8ca424b..645115bfe26e2 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java @@ -38,6 +38,8 @@ import org.apache.lucene.index.PrefixCodedTerms.TermIterator; import org.apache.lucene.index.Term; import org.apache.lucene.queries.intervals.IntervalsSource; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -48,8 +50,6 @@ import 
org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchParseException; import org.opensearch.common.Nullable; diff --git a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java index 368f4ae4adea3..bf7b1ae70c711 100644 --- a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java @@ -37,14 +37,14 @@ import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StoredField; +import org.apache.lucene.sandbox.document.HalfFloatPoint; +import org.apache.lucene.sandbox.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; -import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; diff --git a/server/src/main/java/org/opensearch/index/mapper/RangeType.java b/server/src/main/java/org/opensearch/index/mapper/RangeType.java index 9b0c374f8b54e..5f666dece7ed2 100644 --- a/server/src/main/java/org/opensearch/index/mapper/RangeType.java +++ b/server/src/main/java/org/opensearch/index/mapper/RangeType.java @@ -46,7 +46,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import 
org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.FutureArrays; import org.opensearch.common.Nullable; import org.opensearch.common.geo.ShapeRelation; import org.opensearch.common.network.InetAddresses; @@ -62,6 +61,7 @@ import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Set; import java.util.function.BiFunction; @@ -196,7 +196,7 @@ private Query createQuery( ) { byte[] lowerBytes = InetAddressPoint.encode((InetAddress) lower); byte[] upperBytes = InetAddressPoint.encode((InetAddress) upper); - if (FutureArrays.compareUnsigned(lowerBytes, 0, lowerBytes.length, upperBytes, 0, upperBytes.length) > 0) { + if (Arrays.compareUnsigned(lowerBytes, 0, lowerBytes.length, upperBytes, 0, upperBytes.length) > 0) { throw new IllegalArgumentException("Range query `from` value (" + lower + ") is greater than `to` value (" + upper + ")"); } InetAddress correctedFrom = includeLower ? 
(InetAddress) lower : nextUp(lower); @@ -204,7 +204,7 @@ private Query createQuery( ; lowerBytes = InetAddressPoint.encode(correctedFrom); upperBytes = InetAddressPoint.encode(correctedTo); - if (FutureArrays.compareUnsigned(lowerBytes, 0, lowerBytes.length, upperBytes, 0, upperBytes.length) > 0) { + if (Arrays.compareUnsigned(lowerBytes, 0, lowerBytes.length, upperBytes, 0, upperBytes.length) > 0) { return new MatchNoDocsQuery("float range didn't intersect anything"); } else { return querySupplier.apply(correctedFrom, correctedTo); diff --git a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java index bcb3134e532d7..049c85dc910ed 100644 --- a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java @@ -48,6 +48,12 @@ import org.apache.lucene.index.Term; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.AutomatonQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -59,12 +65,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import 
org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; @@ -1067,8 +1067,9 @@ public static Query createPhrasePrefixQuery( } if (terms.length == 1) { - Term[] newTerms = Arrays.stream(terms[0]).map(term -> new Term(prefixField, term.bytes())).toArray(Term[]::new); - return new SynonymQuery(newTerms); + SynonymQuery.Builder sb = new SynonymQuery.Builder(prefixField); + Arrays.stream(terms[0]).map(term -> new Term(prefixField, term.bytes())).forEach(sb::addTerm); + return sb.build(); } SpanNearQuery.Builder spanQuery = new SpanNearQuery.Builder(field, true); diff --git a/server/src/main/java/org/opensearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/AbstractQueryBuilder.java index 7088b914adc22..3ab3d099f0778 100644 --- a/server/src/main/java/org/opensearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/AbstractQueryBuilder.java @@ -35,8 +35,6 @@ import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanBoostQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; @@ -116,9 +114,7 @@ public final Query toQuery(QueryShardContext context) throws IOException { Query query = doToQuery(context); if (query != null) { if (boost != DEFAULT_BOOST) { - if (query instanceof SpanQuery) { - query = new SpanBoostQuery((SpanQuery) query, boost); - } else if (query instanceof MatchNoDocsQuery == false) { + if (query instanceof MatchNoDocsQuery == false) { query = new BoostQuery(query, boost); } } diff --git a/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java 
b/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java index ebc451a4493ed..1b095c6130a7c 100644 --- a/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; @@ -135,6 +135,7 @@ public static FieldMaskingSpanQueryBuilder fromXContent(XContentParser parser) t ); } inner = (SpanQueryBuilder) query; + SpanQueryBuilderUtil.checkNoBoost(SPAN_FIELD_MASKING_FIELD.getPreferredName(), currentFieldName, parser, inner); } else { throw new ParsingException( parser.getTokenLocation(), @@ -176,7 +177,7 @@ public static FieldMaskingSpanQueryBuilder fromXContent(XContentParser parser) t } @Override - protected SpanQuery doToQuery(QueryShardContext context) throws IOException { + protected Query doToQuery(QueryShardContext context) throws IOException { String fieldInQuery = fieldName; MappedFieldType fieldType = context.fieldMapper(fieldName); if (fieldType != null) { diff --git a/server/src/main/java/org/opensearch/index/query/InnerHitContextBuilder.java b/server/src/main/java/org/opensearch/index/query/InnerHitContextBuilder.java index 712f106545c41..10efb18dc0cfa 100644 --- a/server/src/main/java/org/opensearch/index/query/InnerHitContextBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/InnerHitContextBuilder.java @@ -109,13 +109,13 @@ protected void setupInnerHitsContext(QueryShardContext queryShardContext, InnerH } if (innerHitBuilder.getDocValueFields() != null) { 
FetchDocValuesContext docValuesContext = FetchDocValuesContext.create( - queryShardContext.getMapperService(), + queryShardContext::simpleMatchToIndexNames, + queryShardContext.getIndexSettings().getMaxDocvalueFields(), innerHitBuilder.getDocValueFields() ); innerHitsContext.docValuesContext(docValuesContext); } if (innerHitBuilder.getFetchFields() != null) { - String indexName = queryShardContext.index().getName(); FetchFieldsContext fieldsContext = new FetchFieldsContext(innerHitBuilder.getFetchFields()); innerHitsContext.fetchFieldsContext(fieldsContext); } diff --git a/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java index 0fada4f4a1363..52a7635d708f5 100644 --- a/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java @@ -301,7 +301,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { Query innerQuery; ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); if (objectMapper == null) { - parentFilter = context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated())); + parentFilter = context.bitsetFilter(Queries.newNonNestedFilter()); } else { parentFilter = context.bitsetFilter(objectMapper.nestedTypeFilter()); } @@ -416,7 +416,7 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { Query rawParentFilter; if (parentObjectMapper == null) { - rawParentFilter = Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated()); + rawParentFilter = Queries.newNonNestedFilter(); } else { rawParentFilter = parentObjectMapper.nestedTypeFilter(); } diff --git a/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java index 8739e48eb411b..1d781060dc6bc 100644 --- 
a/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java @@ -38,6 +38,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; @@ -180,6 +181,11 @@ public String toString(String field) { return buffer.toString(); } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public boolean equals(Object obj) { if (sameClassAs(obj) == false) return false; diff --git a/server/src/main/java/org/opensearch/index/query/SpanContainingQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanContainingQueryBuilder.java index 131b27b6b6ad9..9168f7a29f2dc 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanContainingQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanContainingQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanContainingQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanContainingQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; @@ -48,7 +48,7 @@ import static org.opensearch.index.query.SpanQueryBuilder.SpanQueryBuilderUtil.checkNoBoost; /** - * Builder for {@link org.apache.lucene.search.spans.SpanContainingQuery}. + * Builder for {@link org.apache.lucene.queries.spans.SpanContainingQuery}. 
*/ public class SpanContainingQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { public static final String NAME = "span_containing"; diff --git a/server/src/main/java/org/opensearch/index/query/SpanFirstQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanFirstQueryBuilder.java index 087382d570411..e3d6315b5d18c 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanFirstQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanFirstQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanFirstQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanFirstQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/opensearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanMultiTermQueryBuilder.java index 2783e0b9b8777..9411ba9c59087 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanMultiTermQueryBuilder.java @@ -32,13 +32,13 @@ package org.opensearch.index.query; import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TopTermsRewrite; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import 
org.opensearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/opensearch/index/query/SpanNearQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanNearQueryBuilder.java index 264a3f87dd2dd..a648eedc9f3b5 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanNearQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanNearQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; diff --git a/server/src/main/java/org/opensearch/index/query/SpanNotQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanNotQueryBuilder.java index 4db989e29b841..c43430e00de98 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanNotQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanNotQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanNotQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanNotQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/opensearch/index/query/SpanOrQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanOrQueryBuilder.java index 1e4a3b45f0df8..0a08f778cf889 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanOrQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanOrQueryBuilder.java 
@@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/opensearch/index/query/SpanTermQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanTermQueryBuilder.java index 07091bf0eb1b6..02a0f55685ca4 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanTermQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanTermQueryBuilder.java @@ -33,9 +33,9 @@ package org.opensearch.index.query; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/opensearch/index/query/SpanWithinQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanWithinQueryBuilder.java index 83bf3d1b90eea..a8ab2a8831f55 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanWithinQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanWithinQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanWithinQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanWithinQuery; import 
org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; @@ -48,7 +48,7 @@ import static org.opensearch.index.query.SpanQueryBuilder.SpanQueryBuilderUtil.checkNoBoost; /** - * Builder for {@link org.apache.lucene.search.spans.SpanWithinQuery}. + * Builder for {@link org.apache.lucene.queries.spans.SpanWithinQuery}. */ public class SpanWithinQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { public static final String NAME = "span_within"; diff --git a/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java index 55ea770c2e154..f7dd146f9f019 100644 --- a/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java @@ -34,8 +34,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.Term; +import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.CoveringQuery; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LongValues; diff --git a/server/src/main/java/org/opensearch/index/search/MatchQuery.java b/server/src/main/java/org/opensearch/index/search/MatchQuery.java index 75f8d9aa6ba9f..485715c430b3f 100644 --- a/server/src/main/java/org/opensearch/index/search/MatchQuery.java +++ b/server/src/main/java/org/opensearch/index/search/MatchQuery.java @@ -42,6 +42,11 @@ import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanNearQuery; 
+import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -50,11 +55,6 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.QueryBuilder; import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings; import org.opensearch.OpenSearchException; @@ -550,11 +550,6 @@ private SpanQuery newSpanQuery(Term[] terms, boolean isPrefix) { return new SpanOrQuery(spanQueries); } - @Override - protected SpanQuery createSpanQuery(TokenStream in, String field) throws IOException { - return createSpanQuery(in, field, false); - } - private SpanQuery createSpanQuery(TokenStream in, String field, boolean isPrefix) throws IOException { TermToBytesRefAttribute termAtt = in.getAttribute(TermToBytesRefAttribute.class); PositionIncrementAttribute posIncAtt = in.getAttribute(PositionIncrementAttribute.class); diff --git a/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java index 85c801ca43fe6..e9437f5704851 100644 --- a/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java @@ -37,6 +37,9 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import 
org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.queryparser.classic.ParseException; import org.apache.lucene.queryparser.classic.Token; import org.apache.lucene.queryparser.classic.XQueryParser; @@ -52,9 +55,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.RegExp; import org.opensearch.common.lucene.search.Queries; @@ -646,11 +646,11 @@ private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr, Mappe } } else if (isLastPos == false) { // build a synonym query for terms in the same position. 
- Term[] terms = new Term[plist.size()]; - for (int i = 0; i < plist.size(); i++) { - terms[i] = new Term(field, plist.get(i)); + SynonymQuery.Builder sb = new SynonymQuery.Builder(field); + for (String synonym : plist) { + sb.addTerm(new Term(field, synonym)); } - posQuery = new SynonymQuery(terms); + posQuery = sb.build(); } else { List innerClauses = new ArrayList<>(); for (String token : plist) { diff --git a/server/src/main/java/org/opensearch/index/search/SimpleQueryStringQueryParser.java b/server/src/main/java/org/opensearch/index/search/SimpleQueryStringQueryParser.java index 2173c53f8131a..f4f68634d0df5 100644 --- a/server/src/main/java/org/opensearch/index/search/SimpleQueryStringQueryParser.java +++ b/server/src/main/java/org/opensearch/index/search/SimpleQueryStringQueryParser.java @@ -287,11 +287,12 @@ private Query newPossiblyAnalyzedQuery(String field, String termStr, Analyzer an } } else if (isLastPos == false) { // build a synonym query for terms in the same position. - Term[] terms = new Term[plist.size()]; - for (int i = 0; i < plist.size(); i++) { - terms[i] = new Term(field, plist.get(i)); + SynonymQuery.Builder sb = new SynonymQuery.Builder(field); + for (BytesRef bytesRef : plist) { + sb.addTerm(new Term(field, bytesRef)); + } - posQuery = new SynonymQuery(terms); + posQuery = sb.build(); } else { BooleanQuery.Builder innerBuilder = new BooleanQuery.Builder(); for (BytesRef token : plist) { diff --git a/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java index a76e296c40681..16d76ece840a2 100644 --- a/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java +++ b/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java @@ -45,6 +45,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; 
import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; @@ -257,13 +258,10 @@ public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException { } @Override - public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException { - spare.bytes = value; - spare.offset = 0; - spare.length = value.length; + public void stringField(FieldInfo fieldInfo, String value) throws IOException { switch (fieldInfo.name) { case RoutingFieldMapper.NAME: - routing = spare.utf8ToString(); + routing = value; break; default: throw new IllegalStateException("Unexpected field: " + fieldInfo.name); @@ -358,7 +356,7 @@ public float matchCost() { */ private static BitSetProducer newParentDocBitSetProducer(Version indexVersionCreated) { return context -> { - Query query = Queries.newNonNestedFilter(indexVersionCreated); + Query query = Queries.newNonNestedFilter(); final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context); final IndexSearcher searcher = new IndexSearcher(topLevelContext); searcher.setQueryCache(null); @@ -367,4 +365,9 @@ private static BitSetProducer newParentDocBitSetProducer(Version indexVersionCre return s == null ? 
null : BitSet.of(s.iterator(), context.reader().maxDoc()); }; } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } } diff --git a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java index 20bb6e7060ca3..387f77a839d35 100644 --- a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java @@ -191,7 +191,7 @@ void addIndices( assert sources.length > 0; final int luceneIndexCreatedVersionMajor = Lucene.readSegmentInfos(sources[0]).getIndexCreatedVersionMajor(); - final Directory hardLinkOrCopyTarget = new org.apache.lucene.store.HardlinkCopyDirectoryWrapper(target); + final Directory hardLinkOrCopyTarget = new org.apache.lucene.misc.store.HardlinkCopyDirectoryWrapper(target); IndexWriterConfig iwc = new IndexWriterConfig(null).setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setCommitOnClose(false) diff --git a/server/src/main/java/org/opensearch/index/similarity/SimilarityProviders.java b/server/src/main/java/org/opensearch/index/similarity/SimilarityProviders.java index 41d97b9d2a53d..3617c9607a3ab 100644 --- a/server/src/main/java/org/opensearch/index/similarity/SimilarityProviders.java +++ b/server/src/main/java/org/opensearch/index/similarity/SimilarityProviders.java @@ -32,6 +32,7 @@ package org.opensearch.index.similarity; +import org.apache.lucene.misc.search.similarity.LegacyBM25Similarity; import org.apache.lucene.search.similarities.AfterEffect; import org.apache.lucene.search.similarities.AfterEffectB; import org.apache.lucene.search.similarities.AfterEffectL; @@ -62,7 +63,6 @@ import org.apache.lucene.search.similarities.NormalizationH2; import org.apache.lucene.search.similarities.NormalizationH3; import org.apache.lucene.search.similarities.NormalizationZ; -import org.apache.lucene.search.similarity.LegacyBM25Similarity; import 
org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.common.logging.DeprecationLogger; @@ -307,9 +307,7 @@ public static LegacyBM25Similarity createBM25Similarity(Settings settings, Versi float b = settings.getAsFloat("b", 0.75f); boolean discountOverlaps = settings.getAsBoolean(DISCOUNT_OVERLAPS, true); - LegacyBM25Similarity similarity = new LegacyBM25Similarity(k1, b); - similarity.setDiscountOverlaps(discountOverlaps); - return similarity; + return new LegacyBM25Similarity(k1, b, discountOverlaps); } public static BooleanSimilarity createBooleanSimilarity(Settings settings, Version indexCreatedVersion) { diff --git a/server/src/main/java/org/opensearch/index/similarity/SimilarityService.java b/server/src/main/java/org/opensearch/index/similarity/SimilarityService.java index a183457ffbc72..bb82d53de815a 100644 --- a/server/src/main/java/org/opensearch/index/similarity/SimilarityService.java +++ b/server/src/main/java/org/opensearch/index/similarity/SimilarityService.java @@ -34,6 +34,7 @@ import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.misc.search.similarity.LegacyBM25Similarity; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.TermStatistics; @@ -42,7 +43,6 @@ import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity.SimScorer; -import org.apache.lucene.search.similarity.LegacyBM25Similarity; import org.apache.lucene.util.BytesRef; import org.opensearch.LegacyESVersion; import org.opensearch.Version; diff --git a/server/src/main/java/org/opensearch/index/store/Store.java b/server/src/main/java/org/opensearch/index/store/Store.java index 2b47c5845a394..f4a94023a8ac8 100644 --- a/server/src/main/java/org/opensearch/index/store/Store.java +++ 
b/server/src/main/java/org/opensearch/index/store/Store.java @@ -50,6 +50,7 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.BufferedChecksum; +import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; @@ -66,7 +67,6 @@ import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.Streams; -import org.opensearch.common.io.stream.BytesStreamInput; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -669,9 +669,7 @@ public void cleanupAndVerify(String reason, MetadataSnapshot sourceMetadata) thr directory.deleteFile(reason, existingFile); // FNF should not happen since we hold a write lock? } catch (IOException ex) { - if (existingFile.startsWith(IndexFileNames.SEGMENTS) - || existingFile.equals(IndexFileNames.OLD_SEGMENTS_GEN) - || existingFile.startsWith(CORRUPTED_MARKER_NAME_PREFIX)) { + if (existingFile.startsWith(IndexFileNames.SEGMENTS) || existingFile.startsWith(CORRUPTED_MARKER_NAME_PREFIX)) { // TODO do we need to also fail this if we can't delete the pending commit file? // if one of those files can't be deleted we better fail the cleanup otherwise we might leave an old commit // point around? 
@@ -1053,9 +1051,6 @@ public RecoveryDiff recoveryDiff(MetadataSnapshot recoveryTargetSnapshot) { final List perCommitStoreFiles = new ArrayList<>(); for (StoreFileMetadata meta : this) { - if (IndexFileNames.OLD_SEGMENTS_GEN.equals(meta.name())) { // legacy - continue; // we don't need that file at all - } final String segmentId = IndexFileNames.parseSegmentName(meta.name()); final String extension = IndexFileNames.getExtension(meta.name()); if (IndexFileNames.SEGMENTS.equals(segmentId) @@ -1095,14 +1090,11 @@ public RecoveryDiff recoveryDiff(MetadataSnapshot recoveryTargetSnapshot) { Collections.unmodifiableList(different), Collections.unmodifiableList(missing) ); - assert recoveryDiff.size() == this.metadata.size() - (metadata.containsKey(IndexFileNames.OLD_SEGMENTS_GEN) ? 1 : 0) - : "some files are missing recoveryDiff size: [" - + recoveryDiff.size() - + "] metadata size: [" - + this.metadata.size() - + "] contains segments.gen: [" - + metadata.containsKey(IndexFileNames.OLD_SEGMENTS_GEN) - + "]"; + assert recoveryDiff.size() == this.metadata.size() : "some files are missing recoveryDiff size: [" + + recoveryDiff.size() + + "] metadata size: [" + + this.metadata.size() + + "]"; return recoveryDiff; } @@ -1236,7 +1228,7 @@ public void verify() throws IOException { String footerDigest = null; if (metadata.checksum().equals(actualChecksum) && writtenBytes == metadata.length()) { ByteArrayIndexInput indexInput = new ByteArrayIndexInput("checksum", this.footerChecksum); - footerDigest = digestToString(indexInput.readLong()); + footerDigest = digestToString(CodecUtil.readBELong(indexInput)); if (metadata.checksum().equals(footerDigest)) { return; } @@ -1393,9 +1385,9 @@ public void seek(long pos) throws IOException { // skipping the verified portion input.seek(verifiedPosition); // and checking unverified - skipBytes(pos - verifiedPosition); + super.seek(pos); } else { - skipBytes(pos - getFilePointer()); + super.seek(pos); } } } @@ -1425,8 +1417,12 @@ public 
IndexInput slice(String sliceDescription, long offset, long length) throw throw new UnsupportedOperationException(); } - public long getStoredChecksum() throws IOException { - return new BytesStreamInput(checksum).readLong(); + public long getStoredChecksum() { + try { + return CodecUtil.readBELong(new ByteArrayDataInput(checksum)); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } public long verify() throws CorruptIndexException, IOException { diff --git a/server/src/main/java/org/opensearch/index/translog/Checkpoint.java b/server/src/main/java/org/opensearch/index/translog/Checkpoint.java index b8346a26da8e6..0cff884cabfaa 100644 --- a/server/src/main/java/org/opensearch/index/translog/Checkpoint.java +++ b/server/src/main/java/org/opensearch/index/translog/Checkpoint.java @@ -32,6 +32,7 @@ package org.opensearch.index.translog; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; @@ -64,11 +65,12 @@ final class Checkpoint { final long minTranslogGeneration; final long trimmedAboveSeqNo; - private static final int CURRENT_VERSION = 3; // introduction of trimmed above seq# + private static final int VERSION_LUCENE_BIG_ENDIAN = 3; // big endian format (Lucene 9+ switches to little endian) + private static final int CURRENT_VERSION = 4; // introduction of trimmed above seq# private static final String CHECKPOINT_CODEC = "ckp"; - static final int V3_FILE_SIZE = CodecUtil.headerLength(CHECKPOINT_CODEC) + Integer.BYTES // ops + static final int V4_FILE_SIZE = CodecUtil.headerLength(CHECKPOINT_CODEC) + Integer.BYTES // ops + Long.BYTES // offset + Long.BYTES // generation + Long.BYTES // minimum sequence number @@ -153,6 +155,10 @@ static Checkpoint emptyTranslogCheckpoint( } static Checkpoint readCheckpointV3(final DataInput in) throws IOException { + return 
readCheckpointV4(EndiannessReverserUtil.wrapDataInput(in)); + } + + static Checkpoint readCheckpointV4(final DataInput in) throws IOException { final long offset = in.readLong(); final int numOps = in.readInt(); final long generation = in.readLong(); @@ -191,10 +197,10 @@ public static Checkpoint read(Path path) throws IOException { try (IndexInput indexInput = dir.openInput(path.getFileName().toString(), IOContext.DEFAULT)) { // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here. CodecUtil.checksumEntireFile(indexInput); - final int fileVersion = CodecUtil.checkHeader(indexInput, CHECKPOINT_CODEC, CURRENT_VERSION, CURRENT_VERSION); - assert fileVersion == CURRENT_VERSION : fileVersion; - assert indexInput.length() == V3_FILE_SIZE : indexInput.length(); - return Checkpoint.readCheckpointV3(indexInput); + final int fileVersion = CodecUtil.checkHeader(indexInput, CHECKPOINT_CODEC, VERSION_LUCENE_BIG_ENDIAN, CURRENT_VERSION); + assert fileVersion == CURRENT_VERSION || fileVersion == VERSION_LUCENE_BIG_ENDIAN : fileVersion; + assert indexInput.length() == V4_FILE_SIZE : indexInput.length(); + return fileVersion == CURRENT_VERSION ? Checkpoint.readCheckpointV4(indexInput) : Checkpoint.readCheckpointV3(indexInput); } catch (CorruptIndexException | NoSuchFileException | IndexFormatTooOldException | IndexFormatTooNewException e) { throw new TranslogCorruptedException(path.toString(), e); } @@ -207,9 +213,8 @@ public static void write(ChannelFactory factory, Path checkpointFile, Checkpoint // now go and write to the channel, in one go. 
try (FileChannel channel = factory.open(checkpointFile, options)) { Channels.writeToChannel(bytes, channel); - // no need to force metadata, file size stays the same and we did the full fsync - // when we first created the file, so the directory entry doesn't change as well - channel.force(false); + // force fsync with metadata since this is used on file creation + channel.force(true); } } @@ -222,7 +227,7 @@ public static void write(FileChannel fileChannel, Path checkpointFile, Checkpoin } private static byte[] createCheckpointBytes(Path checkpointFile, Checkpoint checkpoint) throws IOException { - final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream(V3_FILE_SIZE) { + final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream(V4_FILE_SIZE) { @Override public synchronized byte[] toByteArray() { // don't clone @@ -235,17 +240,17 @@ public synchronized byte[] toByteArray() { resourceDesc, checkpointFile.toString(), byteOutputStream, - V3_FILE_SIZE + V4_FILE_SIZE ) ) { CodecUtil.writeHeader(indexOutput, CHECKPOINT_CODEC, CURRENT_VERSION); checkpoint.write(indexOutput); CodecUtil.writeFooter(indexOutput); - assert indexOutput.getFilePointer() == V3_FILE_SIZE : "get you numbers straight; bytes written: " + assert indexOutput.getFilePointer() == V4_FILE_SIZE : "get you numbers straight; bytes written: " + indexOutput.getFilePointer() + ", buffer size: " - + V3_FILE_SIZE; + + V4_FILE_SIZE; assert indexOutput.getFilePointer() < 512 : "checkpoint files have to be smaller than 512 bytes for atomic writes; size: " + indexOutput.getFilePointer(); } diff --git a/server/src/main/java/org/opensearch/index/translog/Translog.java b/server/src/main/java/org/opensearch/index/translog/Translog.java index 2586599d3ed59..25d84efbee38f 100644 --- a/server/src/main/java/org/opensearch/index/translog/Translog.java +++ b/server/src/main/java/org/opensearch/index/translog/Translog.java @@ -2018,7 +2018,6 @@ public static String createEmptyTranslog( final 
Checkpoint checkpoint = Checkpoint.emptyTranslogCheckpoint(0, generation, initialGlobalCheckpoint, minTranslogGeneration); Checkpoint.write(channelFactory, checkpointFile, checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW); - IOUtils.fsync(checkpointFile, false); final TranslogWriter writer = TranslogWriter.create( shardId, uuid, diff --git a/server/src/main/java/org/opensearch/index/translog/TranslogReader.java b/server/src/main/java/org/opensearch/index/translog/TranslogReader.java index 6d1ae86ebccec..3c47ce2207e4b 100644 --- a/server/src/main/java/org/opensearch/index/translog/TranslogReader.java +++ b/server/src/main/java/org/opensearch/index/translog/TranslogReader.java @@ -111,7 +111,6 @@ TranslogReader closeIntoTrimmedReader(long aboveSeqNo, ChannelFactory channelFac ); Checkpoint.write(channelFactory, checkpointFile, newCheckpoint, StandardOpenOption.WRITE); - IOUtils.fsync(checkpointFile, false); IOUtils.fsync(checkpointFile.getParent(), true); newReader = new TranslogReader(newCheckpoint, channel, path, header); diff --git a/server/src/main/java/org/opensearch/index/translog/TruncateTranslogAction.java b/server/src/main/java/org/opensearch/index/translog/TruncateTranslogAction.java index cac4cdb610cda..7d30fe2e2ffe3 100644 --- a/server/src/main/java/org/opensearch/index/translog/TruncateTranslogAction.java +++ b/server/src/main/java/org/opensearch/index/translog/TruncateTranslogAction.java @@ -245,8 +245,6 @@ private static void writeEmptyCheckpoint(Path filename, int translogLength, long StandardOpenOption.READ, StandardOpenOption.CREATE_NEW ); - // fsync with metadata here to make sure. 
- IOUtils.fsync(filename, false); } /** diff --git a/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java b/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java index 9c7d66457ce15..2cbb83fde278d 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java +++ b/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java @@ -35,7 +35,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.LRUQueryCache; @@ -154,11 +153,6 @@ protected CachingWeightWrapper(Weight in) { this.in = in; } - @Override - public void extractTerms(Set terms) { - in.extractTerms(terms); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { shardKeyMap.add(context.reader()); diff --git a/server/src/main/java/org/opensearch/indices/analysis/AnalysisModule.java b/server/src/main/java/org/opensearch/indices/analysis/AnalysisModule.java index 857c093a64e19..5d43bfa8876c8 100644 --- a/server/src/main/java/org/opensearch/indices/analysis/AnalysisModule.java +++ b/server/src/main/java/org/opensearch/indices/analysis/AnalysisModule.java @@ -228,7 +228,7 @@ static Map setupPreConfiguredTokenFilters(List })); /* Note that "stop" is available in lucene-core but it's pre-built * version uses a set of English stop words that are in - * lucene-analyzers-common so "stop" is defined in the analysis-common + * lucene-analysis-common so "stop" is defined in the analysis-common * module. 
*/ for (AnalysisPlugin plugin : plugins) { diff --git a/server/src/main/java/org/opensearch/indices/analysis/PreBuiltAnalyzers.java b/server/src/main/java/org/opensearch/indices/analysis/PreBuiltAnalyzers.java index 21afbf7b15753..640100bbcc082 100644 --- a/server/src/main/java/org/opensearch/indices/analysis/PreBuiltAnalyzers.java +++ b/server/src/main/java/org/opensearch/indices/analysis/PreBuiltAnalyzers.java @@ -33,12 +33,12 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.classic.ClassicAnalyzer; import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.analysis.core.SimpleAnalyzer; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.analysis.en.EnglishAnalyzer; -import org.apache.lucene.analysis.standard.ClassicAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.opensearch.Version; import org.opensearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; @@ -50,9 +50,7 @@ public enum PreBuiltAnalyzers { STANDARD(CachingStrategy.OPENSEARCH) { @Override protected Analyzer create(Version version) { - final Analyzer a = new StandardAnalyzer(CharArraySet.EMPTY_SET); - a.setVersion(version.luceneVersion); - return a; + return new StandardAnalyzer(CharArraySet.EMPTY_SET); } }, @@ -75,36 +73,28 @@ protected Analyzer create(Version version) { STOP { @Override protected Analyzer create(Version version) { - Analyzer a = new StopAnalyzer(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); - a.setVersion(version.luceneVersion); - return a; + return new StopAnalyzer(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); } }, WHITESPACE { @Override protected Analyzer create(Version version) { - Analyzer a = new WhitespaceAnalyzer(); - a.setVersion(version.luceneVersion); - return a; + return new WhitespaceAnalyzer(); } }, SIMPLE { @Override protected Analyzer create(Version 
version) { - Analyzer a = new SimpleAnalyzer(); - a.setVersion(version.luceneVersion); - return a; + return new SimpleAnalyzer(); } }, CLASSIC { @Override protected Analyzer create(Version version) { - Analyzer a = new ClassicAnalyzer(); - a.setVersion(version.luceneVersion); - return a; + return new ClassicAnalyzer(); } }; diff --git a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java index 684c401716883..d7c3421b1de93 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java @@ -216,7 +216,6 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi final TransportRequest requestToSend; final StartRecoveryRequest startRequest; final RecoveryState.Timer timer; - CancellableThreads cancellableThreads; try (RecoveryRef recoveryRef = onGoingRecoveries.getRecovery(recoveryId)) { if (recoveryRef == null) { logger.trace("not running recovery with id [{}] - can not find it (probably finished)", recoveryId); @@ -224,7 +223,6 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi } final RecoveryTarget recoveryTarget = recoveryRef.get(); timer = recoveryTarget.state().getTimer(); - cancellableThreads = recoveryTarget.cancellableThreads(); if (preExistingRequest == null) { try { final IndexShard indexShard = recoveryTarget.indexShard(); @@ -256,21 +254,12 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi logger.trace("{} reestablishing recovery from {}", startRequest.shardId(), startRequest.sourceNode()); } } - RecoveryResponseHandler responseHandler = new RecoveryResponseHandler(startRequest, timer); - - try { - cancellableThreads.executeIO(() -> - // we still execute under cancelableThreads here to ensure we interrupt any blocking call to 
the network if any - // on the underlying transport. It's unclear if we need this here at all after moving to async execution but - // the issues that a missing call to this could cause are sneaky and hard to debug. If we don't need it on this - // call we can potentially remove it altogether which we should do it in a major release only with enough - // time to test. This shoudl be done for 7.0 if possible - transportService.sendRequest(startRequest.sourceNode(), actionName, requestToSend, responseHandler)); - } catch (CancellableThreads.ExecutionCancelledException e) { - logger.trace("recovery cancelled", e); - } catch (Exception e) { - responseHandler.onException(e); - } + transportService.sendRequest( + startRequest.sourceNode(), + actionName, + requestToSend, + new RecoveryResponseHandler(startRequest, timer) + ); } /** diff --git a/server/src/main/java/org/apache/lucene/queries/MinDocQuery.java b/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java similarity index 96% rename from server/src/main/java/org/apache/lucene/queries/MinDocQuery.java rename to server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java index ae68f93349ce5..a7dbadb32ccf2 100644 --- a/server/src/main/java/org/apache/lucene/queries/MinDocQuery.java +++ b/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; @@ -39,6 +39,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; @@ -160,6 +161,11 @@ public long cost() { } } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public String toString(String field) { return "MinDocQuery(minDoc=" + minDoc + ")"; diff --git a/server/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java similarity index 97% rename from server/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java rename to server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java index 45df15651bfa2..fd4d84fabe9c7 100644 --- a/server/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConstantScoreScorer; @@ -41,6 +41,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafFieldComparator; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Sort; @@ -114,6 +115,11 @@ public boolean isCacheable(LeafReaderContext ctx) { }; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public String toString(String field) { return "SearchAfterSortedDocQuery(sort=" + sort + ", afterDoc=" + after.toString() + ")"; diff --git a/server/src/main/java/org/opensearch/plugins/PluginsService.java b/server/src/main/java/org/opensearch/plugins/PluginsService.java index 8e1bb4c510042..4ef2dc4617de9 100644 --- a/server/src/main/java/org/opensearch/plugins/PluginsService.java +++ b/server/src/main/java/org/opensearch/plugins/PluginsService.java @@ -34,9 +34,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.analysis.util.CharFilterFactory; -import org.apache.lucene.analysis.util.TokenFilterFactory; -import org.apache.lucene.analysis.util.TokenizerFactory; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; @@ -748,10 +745,6 @@ static void reloadLuceneSPI(ClassLoader loader) { PostingsFormat.reloadPostingsFormats(loader); DocValuesFormat.reloadDocValuesFormats(loader); Codec.reloadCodecs(loader); - // Analysis: - CharFilterFactory.reloadCharFilters(loader); - TokenFilterFactory.reloadTokenFilters(loader); - TokenizerFactory.reloadTokenizers(loader); } private Class loadPluginClass(String className, ClassLoader loader) { diff --git 
a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java index a641f2e625e16..f6c76664c8988 100644 --- a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java @@ -324,7 +324,7 @@ public Query buildFilteredQuery(Query query) { if (mapperService().hasNested() && new NestedHelper(mapperService()).mightMatchNestedDocs(query) && (aliasFilter == null || new NestedHelper(mapperService()).mightMatchNestedDocs(aliasFilter))) { - filters.add(Queries.newNonNestedFilter(mapperService().getIndexSettings().getIndexVersionCreated())); + filters.add(Queries.newNonNestedFilter()); } if (aliasFilter != null) { diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index de4586efd60b1..0ffe859879453 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -1088,7 +1088,11 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc context.fetchSourceContext(source.fetchSource()); } if (source.docValueFields() != null) { - FetchDocValuesContext docValuesContext = FetchDocValuesContext.create(context.mapperService(), source.docValueFields()); + FetchDocValuesContext docValuesContext = FetchDocValuesContext.create( + context.mapperService()::simpleMatchToFullName, + context.mapperService().getIndexSettings().getMaxDocvalueFields(), + source.docValueFields() + ); context.docValuesContext(docValuesContext); } if (source.fetchFields() != null) { diff --git a/server/src/main/java/org/opensearch/search/aggregations/MultiBucketCollector.java b/server/src/main/java/org/opensearch/search/aggregations/MultiBucketCollector.java index 4334afbe30454..6bb044b1d7ea8 100644 --- 
a/server/src/main/java/org/opensearch/search/aggregations/MultiBucketCollector.java +++ b/server/src/main/java/org/opensearch/search/aggregations/MultiBucketCollector.java @@ -189,7 +189,7 @@ private MultiLeafBucketCollector(List collectors, boolean c @Override public void setScorer(Scorable scorer) throws IOException { if (cacheScores) { - scorer = new ScoreCachingWrappingScorer(scorer); + scorer = ScoreCachingWrappingScorer.wrap(scorer); } for (int i = 0; i < numCollectors; ++i) { final LeafCollector c = collectors[i]; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java index 1d48850bee122..73dc838a36198 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -37,7 +37,7 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.queries.SearchAfterSortedDocQuery; +import org.opensearch.lucene.queries.SearchAfterSortedDocQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.CollectionTerminatedException; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java index d08f8d0d95931..8dbb902b96186 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java @@ -38,9 +38,9 @@ import org.apache.lucene.search.DocIdSet; import 
org.apache.lucene.search.Query; import org.apache.lucene.util.DocIdSetBuilder; -import org.apache.lucene.util.FutureArrays; import java.io.IOException; +import java.util.Arrays; import java.util.function.ToLongFunction; /** @@ -166,10 +166,9 @@ public void visit(int docID, byte[] packedValue) throws IOException { @Override public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - if ((upperPointQuery != null - && FutureArrays.compareUnsigned(minPackedValue, 0, bytesPerDim, upperPointQuery, 0, bytesPerDim) > 0) + if ((upperPointQuery != null && Arrays.compareUnsigned(minPackedValue, 0, bytesPerDim, upperPointQuery, 0, bytesPerDim) > 0) || (lowerPointQuery != null - && FutureArrays.compareUnsigned(maxPackedValue, 0, bytesPerDim, lowerPointQuery, 0, bytesPerDim) < 0)) { + && Arrays.compareUnsigned(maxPackedValue, 0, bytesPerDim, lowerPointQuery, 0, bytesPerDim) < 0)) { // does not match the query return PointValues.Relation.CELL_OUTSIDE_QUERY; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregator.java index 38371e5fcaeee..a003f1380253b 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -82,9 +82,7 @@ public class NestedAggregator extends BucketsAggregator implements SingleBucketA ) throws IOException { super(name, factories, context, parent, cardinality, metadata); - Query parentFilter = parentObjectMapper != null - ? parentObjectMapper.nestedTypeFilter() - : Queries.newNonNestedFilter(context.mapperService().getIndexSettings().getIndexVersionCreated()); + Query parentFilter = parentObjectMapper != null ? 
parentObjectMapper.nestedTypeFilter() : Queries.newNonNestedFilter(); this.parentFilter = context.bitsetFilterCache().getBitSetProducer(parentFilter); this.childFilter = childObjectMapper.nestedTypeFilter(); this.collectsFromSingleBucket = cardinality.map(estimate -> estimate < 2); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java index 2d5f6cb324aaf..689304215e9c7 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java @@ -71,7 +71,7 @@ public ReverseNestedAggregator( ) throws IOException { super(name, factories, context, parent, cardinality, metadata); if (objectMapper == null) { - parentFilter = Queries.newNonNestedFilter(context.mapperService().getIndexSettings().getIndexVersionCreated()); + parentFilter = Queries.newNonNestedFilter(); } else { parentFilter = objectMapper.nestedTypeFilter(); } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java index 2c44bd5c059fd..c0b3536838430 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java @@ -34,8 +34,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.search.DiversifiedTopDocsCollector; -import org.apache.lucene.search.DiversifiedTopDocsCollector.ScoreDocKey; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; +import 
org.apache.lucene.misc.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchException; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java index 627753d7cdb71..fdb00eed24c0d 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java @@ -34,8 +34,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.search.DiversifiedTopDocsCollector; -import org.apache.lucene.search.DiversifiedTopDocsCollector.ScoreDocKey; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchException; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java index 8535705a1c820..8b4bed89e678c 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java @@ -35,8 +35,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; -import org.apache.lucene.search.DiversifiedTopDocsCollector; -import 
org.apache.lucene.search.DiversifiedTopDocsCollector.ScoreDocKey; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.TopDocsCollector; import org.opensearch.OpenSearchException; import org.opensearch.index.fielddata.AbstractNumericDocValues; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java index 4d8237c5f42f8..62caa017eddba 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java @@ -37,8 +37,8 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.search.DiversifiedTopDocsCollector; -import org.apache.lucene.search.DiversifiedTopDocsCollector.ScoreDocKey; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.TopDocsCollector; import org.opensearch.index.fielddata.AbstractNumericDocValues; import org.opensearch.search.aggregations.Aggregator; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/SamplerAggregator.java index 56d93e2e498f9..a7855c2b400f1 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/SamplerAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/SamplerAggregator.java @@ -32,7 +32,7 @@ package 
org.opensearch.search.aggregations.bucket.sampler; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.DiversifiedTopDocsCollector; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.RamUsageEstimator; import org.opensearch.common.ParseField; diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java index de4ad1376be06..24e7875a6ade9 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java @@ -141,16 +141,22 @@ public InternalAggregation reduce(List aggregations, Reduce InternalTopHits topHitsAgg = (InternalTopHits) aggregations.get(i); shardDocs[i] = topHitsAgg.topDocs.topDocs; shardHits[i] = topHitsAgg.searchHits; + for (ScoreDoc doc : shardDocs[i].scoreDocs) { + doc.shardIndex = i; + } } - reducedTopDocs = TopDocs.merge(sort, from, size, (TopFieldDocs[]) shardDocs, true); + reducedTopDocs = TopDocs.merge(sort, from, size, (TopFieldDocs[]) shardDocs); } else { shardDocs = new TopDocs[aggregations.size()]; for (int i = 0; i < shardDocs.length; i++) { InternalTopHits topHitsAgg = (InternalTopHits) aggregations.get(i); shardDocs[i] = topHitsAgg.topDocs.topDocs; shardHits[i] = topHitsAgg.searchHits; + for (ScoreDoc doc : shardDocs[i].scoreDocs) { + doc.shardIndex = i; + } } - reducedTopDocs = TopDocs.merge(from, size, shardDocs, true); + reducedTopDocs = TopDocs.merge(from, size, shardDocs); } float maxScore = Float.NaN; diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java index e1f6273f62fb6..b755d54de0fe5 100644 --- 
a/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java @@ -37,7 +37,6 @@ import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.Bits; -import org.apache.lucene.util.FutureArrays; import org.opensearch.common.lease.Releasables; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.DoubleArray; @@ -54,6 +53,7 @@ import org.opensearch.search.internal.SearchContext; import java.io.IOException; +import java.util.Arrays; import java.util.Map; import java.util.function.Function; @@ -200,7 +200,7 @@ public void visit(int docID, byte[] packedValue) { @Override public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - if (FutureArrays.equals(maxValue, 0, numBytes, maxPackedValue, 0, numBytes)) { + if (Arrays.equals(maxValue, 0, numBytes, maxPackedValue, 0, numBytes)) { // we only check leaves that contain the max value for the segment. 
return PointValues.Relation.CELL_CROSSES_QUERY; } else { diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorFactory.java index 81fa4b9f83d15..b9699964a611e 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorFactory.java @@ -127,7 +127,11 @@ public Aggregator createInternal( subSearchContext.storedFieldsContext(storedFieldsContext); } if (docValueFields != null) { - FetchDocValuesContext docValuesContext = FetchDocValuesContext.create(searchContext.mapperService(), docValueFields); + FetchDocValuesContext docValuesContext = FetchDocValuesContext.create( + searchContext.mapperService()::simpleMatchToFullName, + searchContext.mapperService().getIndexSettings().getMaxDocvalueFields(), + docValueFields + ); subSearchContext.docValuesContext(docValuesContext); } if (fetchFields != null) { diff --git a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java index a74497477099a..e50f903b22920 100644 --- a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java @@ -83,6 +83,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import static java.util.Collections.emptyMap; @@ -273,9 +274,7 @@ private boolean sourceRequired(SearchContext context) { private int findRootDocumentIfNested(SearchContext context, LeafReaderContext subReaderContext, int subDocId) throws IOException { if (context.mapperService().hasNested()) { - BitSet bits = context.bitsetFilterCache() - .getBitSetProducer(Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated())) - 
.getBitSet(subReaderContext); + BitSet bits = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()).getBitSet(subReaderContext); if (!bits.get(subDocId)) { return bits.nextSetBit(subDocId); } @@ -333,7 +332,7 @@ private HitContext prepareNonNestedHitContext( return new HitContext(hit, subReaderContext, subDocId, lookup.source()); } else { SearchHit hit; - loadStoredFields(context.mapperService(), fieldReader, fieldsVisitor, subDocId); + loadStoredFields(context::fieldType, fieldReader, fieldsVisitor, subDocId); String id = fieldsVisitor.id(); if (fieldsVisitor.fields().isEmpty() == false) { Map docFields = new HashMap<>(); @@ -391,8 +390,8 @@ private HitContext prepareNestedHitContext( } } else { FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource); - loadStoredFields(context.mapperService(), storedFieldReader, rootFieldsVisitor, rootDocId); - rootFieldsVisitor.postProcess(context.mapperService()); + loadStoredFields(context::fieldType, storedFieldReader, rootFieldsVisitor, rootDocId); + rootFieldsVisitor.postProcess(context::fieldType); rootId = rootFieldsVisitor.id(); if (needSource) { @@ -410,7 +409,7 @@ private HitContext prepareNestedHitContext( Map metaFields = emptyMap(); if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) { FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), false); - loadStoredFields(context.mapperService(), storedFieldReader, nestedFieldsVisitor, nestedDocId); + loadStoredFields(context::fieldType, storedFieldReader, nestedFieldsVisitor, nestedDocId); if (nestedFieldsVisitor.fields().isEmpty() == false) { docFields = new HashMap<>(); metaFields = new HashMap<>(); @@ -508,7 +507,7 @@ private SearchHit.NestedIdentity getInternalNestedIdentity( } parentFilter = nestedParentObjectMapper.nestedTypeFilter(); } else { - parentFilter = Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated()); + 
parentFilter = Queries.newNonNestedFilter(); } Query childFilter = nestedObjectMapper.nestedTypeFilter(); @@ -553,14 +552,14 @@ private SearchHit.NestedIdentity getInternalNestedIdentity( } private void loadStoredFields( - MapperService mapperService, + Function fieldTypeLookup, CheckedBiConsumer fieldReader, FieldsVisitor fieldVisitor, int docId ) throws IOException { fieldVisitor.reset(); fieldReader.accept(docId, fieldVisitor); - fieldVisitor.postProcess(mapperService); + fieldVisitor.postProcess(fieldTypeLookup); } private static void fillDocAndMetaFields( diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/FetchDocValuesContext.java b/server/src/main/java/org/opensearch/search/fetch/subphase/FetchDocValuesContext.java index df463a667b2e3..3bfb3365fe46e 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/FetchDocValuesContext.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/FetchDocValuesContext.java @@ -32,11 +32,12 @@ package org.opensearch.search.fetch.subphase; import org.opensearch.index.IndexSettings; -import org.opensearch.index.mapper.MapperService; import java.util.ArrayList; import java.util.Collection; import java.util.List; +import java.util.Set; +import java.util.function.Function; /** * All the required context to pull a field from the doc values. 
@@ -44,15 +45,18 @@ public class FetchDocValuesContext { private final List fields; - public static FetchDocValuesContext create(MapperService mapperService, List fieldPatterns) { + public static FetchDocValuesContext create( + Function> simpleMatchToFullName, + int maxAllowedDocvalueFields, + List fieldPatterns + ) { List fields = new ArrayList<>(); for (FieldAndFormat field : fieldPatterns) { - Collection fieldNames = mapperService.simpleMatchToFullName(field.field); + Collection fieldNames = simpleMatchToFullName.apply(field.field); for (String fieldName : fieldNames) { fields.add(new FieldAndFormat(fieldName, field.format)); } } - int maxAllowedDocvalueFields = mapperService.getIndexSettings().getMaxDocvalueFields(); if (fields.size() > maxAllowedDocvalueFields) { throw new IllegalArgumentException( "Trying to retrieve too many docvalue_fields. Must be less than or equal to: [" diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java index d7ac7d21f1922..22acd599d18c6 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java @@ -35,7 +35,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.Collector; -import org.apache.lucene.search.ConjunctionDISI; +import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.ScoreMode; @@ -187,7 +187,7 @@ public static void intersect(Weight weight, Weight innerHitQueryWeight, Collecto try { Bits acceptDocs = ctx.reader().getLiveDocs(); - DocIdSetIterator iterator = ConjunctionDISI.intersectIterators( + DocIdSetIterator iterator = ConjunctionUtils.intersectIterators( 
Arrays.asList(innerHitQueryScorer.iterator(), scorer.iterator()) ); for (int docId = iterator.nextDoc(); docId < DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) { diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index 8f0c434674feb..8e97fc3a27ffb 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -48,7 +48,6 @@ import org.opensearch.common.text.Text; import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.KeywordFieldMapper; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.TextSearchInfo; import org.opensearch.index.query.QueryShardContext; @@ -134,14 +133,6 @@ CustomUnifiedHighlighter buildHighlighter(FieldHighlightContext fieldContext) th ? 
HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; int maxAnalyzedOffset = fieldContext.context.getIndexSettings().getHighlightMaxAnalyzedOffset(); - int keywordIgnoreAbove = Integer.MAX_VALUE; - if (fieldContext.fieldType instanceof KeywordFieldMapper.KeywordFieldType) { - KeywordFieldMapper mapper = (KeywordFieldMapper) fieldContext.context.mapperService() - .documentMapper() - .mappers() - .getMapper(fieldContext.fieldName); - keywordIgnoreAbove = mapper.ignoreAbove(); - } int numberOfFragments = fieldContext.field.fieldOptions().numberOfFragments(); Analyzer analyzer = getAnalyzer(fieldContext.context.mapperService().documentMapper()); PassageFormatter passageFormatter = getPassageFormatter(fieldContext.hitContext, fieldContext.field, encoder); @@ -178,7 +169,6 @@ CustomUnifiedHighlighter buildHighlighter(FieldHighlightContext fieldContext) th fieldContext.field.fieldOptions().noMatchSize(), higlighterNumberOfFragments, fieldMatcher(fieldContext), - keywordIgnoreAbove, maxAnalyzedOffset ); } diff --git a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java index dad21d024ad49..2cc15d4c65b96 100644 --- a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java @@ -41,7 +41,7 @@ import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.Collector; import org.apache.lucene.search.CollectorManager; -import org.apache.lucene.search.ConjunctionDISI; +import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; @@ -291,10 +291,6 @@ private void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collecto private Weight wrapWeight(Weight weight) { if (cancellable.isEnabled()) { 
return new Weight(weight.getQuery()) { - @Override - public void extractTerms(Set terms) { - throw new UnsupportedOperationException(); - } @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { @@ -344,7 +340,7 @@ static void intersectScorerAndBitSet(Scorer scorer, BitSet acceptDocs, LeafColle collector.setScorer(scorer); // ConjunctionDISI uses the DocIdSetIterator#cost() to order the iterators, so if roleBits has the lowest cardinality it should // be used first: - DocIdSetIterator iterator = ConjunctionDISI.intersectIterators( + DocIdSetIterator iterator = ConjunctionUtils.intersectIterators( Arrays.asList(new BitSetIterator(acceptDocs, acceptDocs.approximateCardinality()), scorer.iterator()) ); int seen = 0; diff --git a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java index 30c36b70bc85c..56b69ab3fb265 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java @@ -33,7 +33,6 @@ package org.opensearch.search.profile.query; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; @@ -44,7 +43,6 @@ import org.opensearch.search.profile.Timer; import java.io.IOException; -import java.util.Set; /** * Weight wrapper that will compute how much time it takes to build the @@ -128,8 +126,8 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public void extractTerms(Set set) { - subQueryWeight.extractTerms(set); + public int count(LeafReaderContext context) throws IOException { + return subQueryWeight.count(context); } @Override diff --git a/server/src/main/java/org/opensearch/search/query/QueryPhase.java 
b/server/src/main/java/org/opensearch/search/query/QueryPhase.java index e78741f48a223..7d4b8738c1800 100644 --- a/server/src/main/java/org/opensearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/opensearch/search/query/QueryPhase.java @@ -36,8 +36,8 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.queries.MinDocQuery; -import org.apache.lucene.queries.SearchAfterSortedDocQuery; +import org.opensearch.lucene.queries.MinDocQuery; +import org.opensearch.lucene.queries.SearchAfterSortedDocQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Collector; @@ -47,7 +47,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; -import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.opensearch.action.search.SearchShardTask; @@ -55,9 +54,6 @@ import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; import org.opensearch.common.util.concurrent.QueueResizingOpenSearchThreadPoolExecutor; -import org.opensearch.index.IndexSortConfig; -import org.opensearch.index.mapper.DateFieldMapper.DateFieldType; -import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.search.DocValueFormat; import org.opensearch.search.SearchContextSourcePrinter; import org.opensearch.search.SearchService; @@ -235,10 +231,6 @@ static boolean executeInternal(SearchContext searchContext) throws QueryPhaseExe // this collector can filter documents during the collection hasFilterCollector = true; } - // optimizing sort on Numerics (long and date) - if ((searchContext.sort() != null) && SYS_PROP_REWRITE_SORT) { - enhanceSortOnNumeric(searchContext, searcher.getIndexReader()); - } boolean 
timeoutSet = scrollContext == null && searchContext.timeout() != null @@ -332,27 +324,6 @@ private static boolean searchWithCollector( return topDocsFactory.shouldRescore(); } - private static void enhanceSortOnNumeric(SearchContext searchContext, IndexReader reader) { - if (canEarlyTerminate(reader, searchContext.sort())) { - // disable this optimization if index sorting matches the query sort since it's already optimized by index searcher - return; - } - Sort sort = searchContext.sort().sort; - SortField sortField = sort.getSort()[0]; - if (SortField.Type.LONG.equals(IndexSortConfig.getSortFieldType(sortField)) == false) return; - - // check if this is a field of type Long or Date, that is indexed and has doc values - String fieldName = sortField.getField(); - if (fieldName == null) return; // happens when _score or _doc is the 1st sort field - if (searchContext.mapperService() == null) return; // mapperService can be null in tests - final MappedFieldType fieldType = searchContext.mapperService().fieldType(fieldName); - if (fieldType == null) return; // for unmapped fields, default behaviour depending on "unmapped_type" flag - if ((fieldType.typeName().equals("long") == false) && (fieldType instanceof DateFieldType == false)) return; - if (fieldType.isSearchable() == false) return; - if (fieldType.hasDocValues() == false) return; - sortField.setCanUsePoints(); - } - /** * Returns true if the provided query returns docs in index order (internal doc ids). 
* @param query The query to execute diff --git a/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java b/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java index a007969ffd108..9cf7dca3c4caf 100644 --- a/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java +++ b/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java @@ -40,6 +40,7 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Collector; @@ -63,7 +64,6 @@ import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.grouping.CollapseTopFieldDocs; import org.apache.lucene.search.grouping.CollapsingTopDocsCollector; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.action.search.MaxScoreCollector; import org.opensearch.common.Nullable; import org.opensearch.common.lucene.Lucene; diff --git a/server/src/main/java/org/opensearch/search/slice/SliceQuery.java b/server/src/main/java/org/opensearch/search/slice/SliceQuery.java index 9dd1b557b34c7..5b2c97e44a521 100644 --- a/server/src/main/java/org/opensearch/search/slice/SliceQuery.java +++ b/server/src/main/java/org/opensearch/search/slice/SliceQuery.java @@ -33,6 +33,7 @@ package org.opensearch.search.slice; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import java.util.Objects; @@ -91,4 +92,10 @@ public String toString(String f) { return getClass().getSimpleName() + "[field=" + field + ", id=" + id + ", max=" + max + "]"; } + @Override + public void visit(QueryVisitor visitor) { + if (visitor.acceptField(field)) { + visitor.visitLeaf(this); + } + } } diff --git a/server/src/main/java/org/opensearch/search/sort/SortBuilder.java 
b/server/src/main/java/org/opensearch/search/sort/SortBuilder.java index 09470f0b2cb45..74a4d974c9555 100644 --- a/server/src/main/java/org/opensearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/opensearch/search/sort/SortBuilder.java @@ -201,7 +201,7 @@ protected static Nested resolveNested(QueryShardContext context, NestedSortBuild final ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); final Query parentQuery; if (objectMapper == null) { - parentQuery = Queries.newNonNestedFilter(context.indexVersionCreated()); + parentQuery = Queries.newNonNestedFilter(); } else { parentQuery = objectMapper.nestedTypeFilter(); } diff --git a/server/src/main/resources/org/opensearch/bootstrap/security.policy b/server/src/main/resources/org/opensearch/bootstrap/security.policy index f51cfbd65a0f0..97b73aedf24bb 100644 --- a/server/src/main/resources/org/opensearch/bootstrap/security.policy +++ b/server/src/main/resources/org/opensearch/bootstrap/security.policy @@ -46,6 +46,8 @@ grant codeBase "${codebase.opensearch-secure-sm}" { grant codeBase "${codebase.opensearch}" { // needed for loading plugins which may expect the context class loader to be set permission java.lang.RuntimePermission "setContextClassLoader"; + // needed for SPI class loading + permission java.lang.RuntimePermission "accessDeclaredMembers"; }; //// Very special jar permissions: diff --git a/server/src/test/java/org/opensearch/LegacyESVersionTests.java b/server/src/test/java/org/opensearch/LegacyESVersionTests.java index aea6f2eebea16..8fb3636dd8b2c 100644 --- a/server/src/test/java/org/opensearch/LegacyESVersionTests.java +++ b/server/src/test/java/org/opensearch/LegacyESVersionTests.java @@ -195,9 +195,9 @@ public void testIsBeta() { } public void testIsAlpha() { - assertTrue(new LegacyESVersion(5000001, org.apache.lucene.util.Version.LUCENE_7_0_0).isAlpha()); - assertFalse(new LegacyESVersion(4000002, org.apache.lucene.util.Version.LUCENE_7_0_0).isAlpha()); - 
assertTrue(new LegacyESVersion(4000002, org.apache.lucene.util.Version.LUCENE_7_0_0).isBeta()); + assertTrue(new LegacyESVersion(5000001, org.apache.lucene.util.Version.LUCENE_8_0_0).isAlpha()); + assertFalse(new LegacyESVersion(4000002, org.apache.lucene.util.Version.LUCENE_8_0_0).isAlpha()); + assertTrue(new LegacyESVersion(4000002, org.apache.lucene.util.Version.LUCENE_8_0_0).isBeta()); assertTrue(LegacyESVersion.fromString("5.0.0-alpha14").isAlpha()); assertEquals(5000014, LegacyESVersion.fromString("5.0.0-alpha14").id); assertTrue(LegacyESVersion.fromId(5000015).isAlpha()); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java index ca3b1f3f3815d..402d6439c4838 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java @@ -34,7 +34,6 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.common.settings.Settings; -import org.opensearch.index.engine.Segment; import org.opensearch.index.MergePolicyConfig; import org.opensearch.indices.IndexClosedException; import org.opensearch.plugins.Plugin; @@ -43,7 +42,6 @@ import org.junit.Before; import java.util.Collection; -import java.util.List; import static org.hamcrest.Matchers.is; @@ -71,18 +69,6 @@ public void setupIndex() { client().admin().indices().prepareRefresh().get(); } - public void testBasic() { - IndicesSegmentResponse rsp = client().admin().indices().prepareSegments("test").get(); - List segments = rsp.getIndices().get("test").iterator().next().getShards()[0].getSegments(); - assertNull(segments.get(0).toString(), segments.get(0).ramTree); - } - - public void testVerbose() { - IndicesSegmentResponse rsp = 
client().admin().indices().prepareSegments("test").setVerbose(true).get(); - List segments = rsp.getIndices().get("test").iterator().next().getShards()[0].getSegments(); - assertNotNull(segments.get(0).toString(), segments.get(0).ramTree); - } - /** * with the default IndicesOptions inherited from BroadcastOperationRequest this will raise an exception */ diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/IndexShardHotSpotTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/IndexShardHotSpotTests.java index 98fb9ae14dd5e..875432c9e6e11 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/IndexShardHotSpotTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/IndexShardHotSpotTests.java @@ -109,6 +109,7 @@ public void testClusterScaleIn() { /** * Test cluster scale in scenario with skewed shard distribution in remaining nodes. */ + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testClusterScaleInWithSkew() { setupInitialCluster(4, 100, 5, 1); buildAllocationService("node_0,node_1"); diff --git a/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java b/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java index 05b185beb57e9..69c431994ba7e 100644 --- a/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java @@ -56,6 +56,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; @@ -440,6 +441,11 @@ public void testAsSequentialAccessBits() throws Exception { private static class UnsupportedQuery extends Query { + @Override + public void visit(QueryVisitor 
visitor) { + visitor.visitLeaf(this); + } + @Override public String toString(String field) { return "Unsupported"; @@ -464,11 +470,6 @@ public boolean isCacheable(LeafReaderContext ctx) { return true; } - @Override - public void extractTerms(Set terms) { - throw new UnsupportedOperationException(); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { throw new UnsupportedOperationException(); diff --git a/server/src/test/java/org/opensearch/common/lucene/search/QueriesTests.java b/server/src/test/java/org/opensearch/common/lucene/search/QueriesTests.java index 25af50f02cd45..b9f128037a970 100644 --- a/server/src/test/java/org/opensearch/common/lucene/search/QueriesTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/search/QueriesTests.java @@ -48,9 +48,9 @@ public class QueriesTests extends OpenSearchTestCase { public void testNonNestedQuery() { for (Version version : VersionUtils.allVersions()) { // This is a custom query that extends AutomatonQuery and want to make sure the equals method works - assertEquals(Queries.newNonNestedFilter(version), Queries.newNonNestedFilter(version)); - assertEquals(Queries.newNonNestedFilter(version).hashCode(), Queries.newNonNestedFilter(version).hashCode()); - assertEquals(Queries.newNonNestedFilter(version), new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME)); + assertEquals(Queries.newNonNestedFilter(), Queries.newNonNestedFilter()); + assertEquals(Queries.newNonNestedFilter().hashCode(), Queries.newNonNestedFilter().hashCode()); + assertEquals(Queries.newNonNestedFilter(), new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME)); } } diff --git a/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java b/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java index b9112b3674c82..26674189f3cd8 100644 --- 
a/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java @@ -33,7 +33,6 @@ package org.opensearch.common.lucene.search.function; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.MatchAllDocsQuery; @@ -83,10 +82,6 @@ public int advance(int target) throws IOException { private static Weight fakeWeight() { return new Weight(new MatchAllDocsQuery()) { - @Override - public void extractTerms(Set terms) { - - } @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { diff --git a/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java b/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java index 4547313639909..70e1e8d73ef3a 100644 --- a/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java +++ b/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java @@ -243,7 +243,7 @@ public static void corruptFile(Path fileToCorrupt, Logger logger) throws IOExcep assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - 8); // one long is the checksum... 
8 bytes checksumAfterCorruption = input.getChecksum(); - actualChecksumAfterCorruption = input.readLong(); + actualChecksumAfterCorruption = CodecUtil.readBELong(input); } StringBuilder msg = new StringBuilder(); msg.append("Checksum before: [").append(checksumBeforeCorruption).append("]"); diff --git a/server/src/test/java/org/opensearch/index/codec/CodecTests.java b/server/src/test/java/org/opensearch/index/codec/CodecTests.java index 745092678861c..66de4d03ebbbf 100644 --- a/server/src/test/java/org/opensearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/opensearch/index/codec/CodecTests.java @@ -34,17 +34,14 @@ import org.apache.logging.log4j.LogManager; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat; -import org.apache.lucene.codecs.lucene87.Lucene87Codec; -import org.apache.lucene.codecs.lucene87.Lucene87StoredFieldsFormat; -import org.apache.lucene.document.BinaryDocValuesField; +import org.apache.lucene.codecs.lucene90.Lucene90Codec; +import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; @@ -68,43 +65,21 @@ public class CodecTests extends OpenSearchTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene87Codec.class)); - assertThat(codecService.codec("Lucene87"), instanceOf(Lucene87Codec.class)); + 
assertThat(codecService.codec("default"), instanceOf(Lucene90Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService().codec("default"); - assertDVCompressionEquals(Lucene80DocValuesFormat.Mode.BEST_COMPRESSION, codec); - assertStoredFieldsFormatCompressionEquals(Lucene87StoredFieldsFormat.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene90Codec.Mode.BEST_SPEED, codec); } public void testBestCompression() throws Exception { Codec codec = createCodecService().codec("best_compression"); - assertDVCompressionEquals(Lucene80DocValuesFormat.Mode.BEST_COMPRESSION, codec); - assertStoredFieldsFormatCompressionEquals(Lucene87StoredFieldsFormat.Mode.BEST_COMPRESSION, codec); - } - - private void assertDVCompressionEquals(Lucene80DocValuesFormat.Mode expected, Codec actual) throws Exception { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(null); - iwc.setCodec(actual); - IndexWriter iw = new IndexWriter(dir, iwc); - Document doc = new Document(); - doc.add(new BinaryDocValuesField("foo", new BytesRef("aaa"))); - iw.addDocument(doc); - iw.commit(); - iw.close(); - DirectoryReader ir = DirectoryReader.open(dir); - SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader(); - String v = sr.getFieldInfos().fieldInfo("foo").getAttribute(Lucene80DocValuesFormat.MODE_KEY); - assertNotNull(v); - assertEquals(expected, Lucene80DocValuesFormat.Mode.valueOf(v)); - ir.close(); - dir.close(); + assertStoredFieldsCompressionEquals(Lucene90Codec.Mode.BEST_COMPRESSION, codec); } // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsFormatCompressionEquals(Lucene87StoredFieldsFormat.Mode expected, Codec actual) throws Exception { + private void assertStoredFieldsCompressionEquals(Lucene90Codec.Mode expected, Codec actual) throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(null); 
iwc.setCodec(actual); @@ -114,9 +89,9 @@ private void assertStoredFieldsFormatCompressionEquals(Lucene87StoredFieldsForma iw.close(); DirectoryReader ir = DirectoryReader.open(dir); SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader(); - String v = sr.getSegmentInfo().info.getAttribute(Lucene87StoredFieldsFormat.MODE_KEY); + String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); assertNotNull(v); - assertEquals(expected, Lucene87StoredFieldsFormat.Mode.valueOf(v)); + assertEquals(expected, Lucene90Codec.Mode.valueOf(v)); ir.close(); dir.close(); } diff --git a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java index 103c8c392be94..30285b1a3a014 100644 --- a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java @@ -32,14 +32,14 @@ package org.opensearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene87.Lucene87Codec; +import org.apache.lucene.codecs.lucene90.Lucene90Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.suggest.document.Completion84PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion90PostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; import org.opensearch.OpenSearchException; @@ -69,8 +69,8 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); - 
final PostingsFormat postingsFormat = new Completion84PostingsFormat(); - indexWriterConfig.setCodec(new Lucene87Codec() { + final PostingsFormat postingsFormat = new Completion90PostingsFormat(); + indexWriterConfig.setCodec(new Lucene90Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index 33f09a3e67db8..5202e04990f95 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -317,7 +317,6 @@ public void testVerboseSegments() throws Exception { segments = engine.segments(true); assertThat(segments.size(), equalTo(1)); - assertThat(segments.get(0).ramTree, notNullValue()); ParsedDocument doc2 = testParsedDocument("2", null, testDocumentWithTextField(), B_2, null); engine.index(indexForDoc(doc2)); @@ -328,9 +327,6 @@ public void testVerboseSegments() throws Exception { segments = engine.segments(true); assertThat(segments.size(), equalTo(3)); - assertThat(segments.get(0).ramTree, notNullValue()); - assertThat(segments.get(1).ramTree, notNullValue()); - assertThat(segments.get(2).ramTree, notNullValue()); } } diff --git a/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java index 95f6e7998b128..f9c6c065904d7 100644 --- a/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java +++ b/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java @@ -68,7 +68,7 @@ public void testRamBytesUsed() throws Exception { map.putIndexUnderLock(uid.toBytesRef(), randomIndexVersionValue()); } } - long actualRamBytesUsed = RamUsageTester.sizeOf(map); + long actualRamBytesUsed = 
RamUsageTester.ramUsed(map); long estimatedRamBytesUsed = map.ramBytesUsed(); // less than 50% off assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, actualRamBytesUsed / 2); @@ -84,7 +84,7 @@ public void testRamBytesUsed() throws Exception { map.putIndexUnderLock(uid.toBytesRef(), randomIndexVersionValue()); } } - actualRamBytesUsed = RamUsageTester.sizeOf(map); + actualRamBytesUsed = RamUsageTester.ramUsed(map); estimatedRamBytesUsed = map.ramBytesUsed(); long tolerance; if (Constants.JRE_IS_MINIMUM_JAVA9) { diff --git a/server/src/test/java/org/opensearch/index/engine/SegmentTests.java b/server/src/test/java/org/opensearch/index/engine/SegmentTests.java index 744b0d0cb4733..7b0072f83f9cb 100644 --- a/server/src/test/java/org/opensearch/index/engine/SegmentTests.java +++ b/server/src/test/java/org/opensearch/index/engine/SegmentTests.java @@ -92,7 +92,7 @@ static Segment randomSegment() { segment.sizeInBytes = randomNonNegativeLong(); segment.docCount = randomIntBetween(1, Integer.MAX_VALUE); segment.delDocCount = randomIntBetween(0, segment.docCount); - segment.version = Version.LUCENE_7_0_0; + segment.version = Version.LUCENE_8_0_0; segment.compound = randomBoolean(); segment.mergeId = randomAlphaOfLengthBetween(1, 10); segment.segmentSort = randomIndexSort(); diff --git a/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java b/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java index 3ca7cdade3509..dc6511b9e2632 100644 --- a/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java +++ b/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java @@ -44,12 +44,12 @@ public void testIndexRamBytesUsed() { translogLoc = new Translog.Location(randomNonNegativeLong(), randomNonNegativeLong(), randomInt()); } IndexVersionValue versionValue = new IndexVersionValue(translogLoc, randomLong(), randomLong(), randomLong()); - assertEquals(RamUsageTester.sizeOf(versionValue), versionValue.ramBytesUsed()); 
+ assertEquals(RamUsageTester.ramUsed(versionValue), versionValue.ramBytesUsed()); } public void testDeleteRamBytesUsed() { DeleteVersionValue versionValue = new DeleteVersionValue(randomLong(), randomLong(), randomLong(), randomLong()); - assertEquals(RamUsageTester.sizeOf(versionValue), versionValue.ramBytesUsed()); + assertEquals(RamUsageTester.ramUsed(versionValue), versionValue.ramBytesUsed()); } } diff --git a/server/src/test/java/org/opensearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/DateFieldTypeTests.java index 20e0d5cfeec29..085343f4ff2f7 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DateFieldTypeTests.java @@ -40,9 +40,9 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.sandbox.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexOrDocValuesQuery; -import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.opensearch.Version; diff --git a/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java index 1ac20f4d0dfe6..57f3f3693257b 100644 --- a/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java @@ -36,16 +36,16 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; import 
org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.sandbox.document.HalfFloatPoint; +import org.apache.lucene.sandbox.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; @@ -531,6 +531,7 @@ public void doTestDocValueRangeQueries(NumberType type, Supplier valueSu dir.close(); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testIndexSortIntRange() throws Exception { doTestIndexSortRangeQueries(NumberType.INTEGER, random()::nextInt); } diff --git a/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java index 65776001381a0..b6e1818364328 100644 --- a/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java @@ -155,7 +155,7 @@ public void testBytesAndNumericRepresentation() throws Exception { CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor(fieldNames, false); searcher.doc(0, fieldsVisitor); - fieldsVisitor.postProcess(mapperService); + fieldsVisitor.postProcess(mapperService::fieldType); assertThat(fieldsVisitor.fields().size(), equalTo(10)); assertThat(fieldsVisitor.fields().get("field1").size(), equalTo(1)); assertThat(fieldsVisitor.fields().get("field1").get(0), equalTo((byte) 1)); diff --git a/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java 
b/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java index f33e59cb39208..2db5f2eea3596 100644 --- a/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java @@ -51,6 +51,10 @@ import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.MultiPhraseQuery; @@ -58,10 +62,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.opensearch.common.Strings; import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; diff --git a/server/src/test/java/org/opensearch/index/query/DisMaxQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/DisMaxQueryBuilderTests.java index 7bbebb82c03d0..bc8cd6c57b975 100644 --- a/server/src/test/java/org/opensearch/index/query/DisMaxQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/DisMaxQueryBuilderTests.java @@ -42,14 +42,9 @@ import java.io.IOException; import java.util.Collection; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; 
- public class DisMaxQueryBuilderTests extends AbstractQueryTestCase { /** * @return a {@link DisMaxQueryBuilder} with random inner queries @@ -70,14 +65,8 @@ protected DisMaxQueryBuilder doCreateTestQueryBuilder() { @Override protected void doAssertLuceneQuery(DisMaxQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { Collection queries = AbstractQueryBuilder.toQueries(queryBuilder.innerQueries(), context); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query; - assertThat(disjunctionMaxQuery.getTieBreakerMultiplier(), equalTo(queryBuilder.tieBreaker())); - assertThat(disjunctionMaxQuery.getDisjuncts().size(), equalTo(queries.size())); - Iterator queryIterator = queries.iterator(); - for (int i = 0; i < disjunctionMaxQuery.getDisjuncts().size(); i++) { - assertThat(disjunctionMaxQuery.getDisjuncts().get(i), equalTo(queryIterator.next())); - } + Query expected = new DisjunctionMaxQuery(queries, queryBuilder.tieBreaker()); + assertEquals(expected, query); } @Override @@ -114,20 +103,8 @@ public void testToQueryInnerPrefixQuery() throws Exception { + " }\n" + "}"; Query query = parseQuery(queryAsString).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query; - - List disjuncts = disjunctionMaxQuery.getDisjuncts(); - assertThat(disjuncts.size(), equalTo(1)); - - assertThat(disjuncts.get(0), instanceOf(BoostQuery.class)); - BoostQuery boostQuery = (BoostQuery) disjuncts.get(0); - assertThat((double) boostQuery.getBoost(), closeTo(1.2, 0.00001)); - assertThat(boostQuery.getQuery(), instanceOf(PrefixQuery.class)); - PrefixQuery firstQ = (PrefixQuery) boostQuery.getQuery(); - // since age is automatically registered in data, we encode it as numeric - assertThat(firstQ.getPrefix(), equalTo(new Term(TEXT_FIELD_NAME, "sh"))); - + Query expected = new 
DisjunctionMaxQuery(List.of(new BoostQuery(new PrefixQuery(new Term(TEXT_FIELD_NAME, "sh")), 1.2f)), 0); + assertEquals(expected, query); } public void testFromJson() throws IOException { diff --git a/server/src/test/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilderTests.java index 10d4a7918fb1e..402b44ed3df76 100644 --- a/server/src/test/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilderTests.java @@ -32,8 +32,12 @@ package org.opensearch.index.query; +import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.SpanTermQuery; +import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.opensearch.common.ParsingException; import org.opensearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -52,6 +56,7 @@ protected FieldMaskingSpanQueryBuilder doCreateTestQueryBuilder() { fieldName = randomAlphaOfLengthBetween(1, 10); } SpanTermQueryBuilder innerQuery = new SpanTermQueryBuilderTests().createTestQueryBuilder(); + innerQuery.boost(1f); return new FieldMaskingSpanQueryBuilder(innerQuery, fieldName); } @@ -62,7 +67,8 @@ protected void doAssertLuceneQuery(FieldMaskingSpanQueryBuilder queryBuilder, Qu assertThat(query, instanceOf(FieldMaskingSpanQuery.class)); FieldMaskingSpanQuery fieldMaskingSpanQuery = (FieldMaskingSpanQuery) query; assertThat(fieldMaskingSpanQuery.getField(), equalTo(fieldInQuery)); - assertThat(fieldMaskingSpanQuery.getMaskedQuery(), equalTo(queryBuilder.innerQuery().toQuery(context))); + Query sub = queryBuilder.innerQuery().toQuery(context); + assertThat(fieldMaskingSpanQuery.getMaskedQuery(), equalTo(sub)); } public void testIllegalArguments() { @@ -90,10 +96,35 
@@ public void testFromJson() throws IOException { + " \"_name\" : \"KPI\"\n" + " }\n" + "}"; - FieldMaskingSpanQueryBuilder parsed = (FieldMaskingSpanQueryBuilder) parseQuery(json); - checkGeneratedJson(json, parsed); - assertEquals(json, 42.0, parsed.boost(), 0.00001); - assertEquals(json, 0.23, parsed.innerQuery().boost(), 0.00001); + Exception exception = expectThrows(ParsingException.class, () -> parseQuery(json)); + assertThat( + exception.getMessage(), + equalTo( + SPAN_FIELD_MASKING_FIELD.getPreferredName() + " [query] as a nested span clause can't have non-default boost value [0.23]" + ) + ); + } + + public void testJsonSpanTermWithBoost() throws IOException { + String json = "{\n" + + " \"span_field_masking\" : {\n" + + " \"query\" : {\n" + + " \"span_term\" : {\n" + + " \"value\" : {\n" + + " \"value\" : \"term\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"field\" : \"mapped_geo_shape\",\n" + + " \"boost\" : 42.0,\n" + + " \"_name\" : \"KPI\"\n" + + " }\n" + + "}"; + Query query = parseQuery(json).toQuery(createShardContext()); + assertEquals( + new BoostQuery(new FieldMaskingSpanQuery(new SpanTermQuery(new Term("value", "term")), "mapped_geo_shape"), 42f), + query + ); } public void testDeprecatedName() throws IOException { diff --git a/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java index 758f69f715a4d..9ae95fd941a59 100644 --- a/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java @@ -277,7 +277,9 @@ public void testAnalysisSynonym() throws Exception { query, asList( new TermQuery(new Term(TEXT_FIELD_NAME, "fox")), - new SynonymQuery(new Term(TEXT_FIELD_NAME, "dogs"), new Term(TEXT_FIELD_NAME, "dog")), + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "dogs")) + .addTerm(new 
Term(TEXT_FIELD_NAME, "dog")) + .build(), new PrefixQuery(new Term(TEXT_FIELD_NAME, "red")) ) ); diff --git a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java index bf42aca156805..354e932f6b9f9 100644 --- a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java @@ -37,6 +37,10 @@ import org.apache.lucene.analysis.MockSynonymAnalyzer; import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FuzzyQuery; @@ -47,10 +51,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest; import org.opensearch.common.ParsingException; diff --git a/server/src/test/java/org/opensearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MultiMatchQueryBuilderTests.java index 294674ee01189..ec04ee8fd3d6d 100644 --- a/server/src/test/java/org/opensearch/index/query/MultiMatchQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MultiMatchQueryBuilderTests.java @@ -64,9 +64,9 @@ import static org.opensearch.index.query.QueryBuilders.multiMatchQuery; import 
static org.opensearch.test.hamcrest.OpenSearchAssertions.assertBooleanSubQuery; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertDisjunctionSubQuery; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.either; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.CoreMatchers.instanceOf; @@ -241,34 +241,29 @@ public void testToQueryMultipleFieldsDisableDismax() throws Exception { .field(KEYWORD_FIELD_NAME) .tieBreaker(1.0f) .toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; - assertThat(dQuery.getTieBreakerMultiplier(), equalTo(1.0f)); - assertThat(dQuery.getDisjuncts().size(), equalTo(2)); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 1 + ); + assertEquals(expected, query); } public void testToQueryMultipleFieldsDisMaxQuery() throws Exception { Query query = multiMatchQuery("test").field(TEXT_FIELD_NAME).field(KEYWORD_FIELD_NAME).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query; - assertThat(disMaxQuery.getTieBreakerMultiplier(), equalTo(0.0f)); - List disjuncts = disMaxQuery.getDisjuncts(); - assertThat(disjuncts.get(0), instanceOf(TermQuery.class)); - assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(disjuncts.get(1), instanceOf(TermQuery.class)); - 
assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 0 + ); + assertEquals(expected, query); } public void testToQueryFieldsWildcard() throws Exception { Query query = multiMatchQuery("test").field("mapped_str*").tieBreaker(1.0f).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; - assertThat(dQuery.getTieBreakerMultiplier(), equalTo(1.0f)); - assertThat(dQuery.getDisjuncts().size(), equalTo(2)); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 1 + ); + assertEquals(expected, query); } public void testToQueryFieldMissing() throws Exception { @@ -298,11 +293,22 @@ public void testToQueryBooleanPrefixMultipleFields() throws IOException { assertThat(query, instanceOf(DisjunctionMaxQuery.class)); final DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query; assertThat(disMaxQuery.getDisjuncts(), hasSize(2)); - final BooleanQuery firstDisjunct = assertDisjunctionSubQuery(disMaxQuery, BooleanQuery.class, 0); - assertThat(firstDisjunct.clauses(), hasSize(2)); - assertThat(assertBooleanSubQuery(firstDisjunct, TermQuery.class, 0).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "foo"))); - final PrefixQuery secondDisjunct = assertDisjunctionSubQuery(disMaxQuery, PrefixQuery.class, 1); - assertThat(secondDisjunct.getPrefix(), equalTo(new Term(KEYWORD_FIELD_NAME, "foo bar"))); + for (Query disjunct : 
disMaxQuery.getDisjuncts()) { + if (disjunct instanceof BooleanQuery) { + final BooleanQuery firstDisjunct = (BooleanQuery) disjunct; + assertThat(firstDisjunct.clauses(), hasSize(2)); + assertThat( + assertBooleanSubQuery(firstDisjunct, TermQuery.class, 0).getTerm(), + equalTo(new Term(TEXT_FIELD_NAME, "foo")) + ); + } else if (disjunct instanceof PrefixQuery) { + final PrefixQuery secondDisjunct = (PrefixQuery) disjunct; + assertThat(secondDisjunct.getPrefix(), equalTo(new Term(KEYWORD_FIELD_NAME, "foo bar"))); + } else { + throw new AssertionError(); + } + assertThat(disjunct, either(instanceOf(BooleanQuery.class)).or(instanceOf(PrefixQuery.class))); + } } } diff --git a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java index 8eaeaa17f7bb5..d08f2ef170bf2 100644 --- a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java @@ -37,6 +37,9 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.AutomatonQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; @@ -58,9 +61,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import 
org.apache.lucene.util.automaton.Automaton; @@ -95,7 +95,6 @@ import static org.opensearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; import static org.opensearch.index.query.QueryBuilders.queryStringQuery; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertBooleanSubQuery; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertDisjunctionSubQuery; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.Matchers.containsString; @@ -503,29 +502,29 @@ public void testToQueryMultipleTermsBooleanQuery() throws Exception { public void testToQueryMultipleFieldsBooleanQuery() throws Exception { Query query = queryStringQuery("test").field(TEXT_FIELD_NAME).field(KEYWORD_FIELD_NAME).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery bQuery = (DisjunctionMaxQuery) query; - assertThat(bQuery.getDisjuncts().size(), equalTo(2)); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 0 + ); + assertEquals(expected, query); } public void testToQueryMultipleFieldsDisMaxQuery() throws Exception { Query query = queryStringQuery("test").field(TEXT_FIELD_NAME).field(KEYWORD_FIELD_NAME).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query; - List disjuncts = disMaxQuery.getDisjuncts(); - assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new 
Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 0 + ); + assertEquals(expected, query); } public void testToQueryFieldsWildcard() throws Exception { Query query = queryStringQuery("test").field("mapped_str*").toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; - assertThat(dQuery.getDisjuncts().size(), equalTo(2)); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 0 + ); + assertEquals(expected, query); } /** @@ -544,11 +543,14 @@ public void testAllowLeadingWildcard() throws Exception { public void testToQueryDisMaxQuery() throws Exception { Query query = queryStringQuery("test").field(TEXT_FIELD_NAME, 2.2f).field(KEYWORD_FIELD_NAME).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query; - List disjuncts = disMaxQuery.getDisjuncts(); - assertTermOrBoostQuery(disjuncts.get(0), TEXT_FIELD_NAME, "test", 2.2f); - assertTermOrBoostQuery(disjuncts.get(1), KEYWORD_FIELD_NAME, "test", 1.0f); + Query expected = new DisjunctionMaxQuery( + List.of( + new BoostQuery(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), 2.2f), + new TermQuery(new Term(KEYWORD_FIELD_NAME, "test")) + ), + 0 + ); + assertEquals(expected, query); } public void testToQueryWildcardQuery() throws Exception { @@ -602,15 +604,27 @@ public void testToQueryWilcardQueryWithSynonyms() throws Exception { Query query = 
queryParser.parse("first foo-bar-foobar* last"); Query expectedQuery = new BooleanQuery.Builder().add( - new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "first"), new Term(TEXT_FIELD_NAME, "first")), defaultOp) + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "first")) + .addTerm(new Term(TEXT_FIELD_NAME, "first")) + .build(), + defaultOp + ) ) .add( new BooleanQuery.Builder().add( - new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "foo"), new Term(TEXT_FIELD_NAME, "foo")), defaultOp) + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "foo")) + .addTerm(new Term(TEXT_FIELD_NAME, "foo")) + .build(), + defaultOp + ) ) .add( new BooleanClause( - new SynonymQuery(new Term(TEXT_FIELD_NAME, "bar"), new Term(TEXT_FIELD_NAME, "bar")), + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "bar")) + .addTerm(new Term(TEXT_FIELD_NAME, "bar")) + .build(), defaultOp ) ) @@ -625,7 +639,14 @@ public void testToQueryWilcardQueryWithSynonyms() throws Exception { .build(), defaultOp ) - .add(new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "last"), new Term(TEXT_FIELD_NAME, "last")), defaultOp)) + .add( + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "last")) + .addTerm(new Term(TEXT_FIELD_NAME, "last")) + .build(), + defaultOp + ) + ) .build(); assertThat(query, Matchers.equalTo(expectedQuery)); } diff --git a/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java index 35a04e80e4511..b11e0cab76340 100644 --- a/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java @@ -35,6 +35,10 @@ import org.apache.lucene.analysis.MockSynonymAnalyzer; import 
org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -46,10 +50,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.TestUtil; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; @@ -509,15 +509,27 @@ public void testAnalyzerWildcardWithSynonyms() throws IOException { parser.setDefaultOperator(defaultOp); Query query = parser.parse("first foo-bar-foobar* last"); Query expectedQuery = new BooleanQuery.Builder().add( - new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "first"), new Term(TEXT_FIELD_NAME, "first")), defaultOp) + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "first")) + .addTerm(new Term(TEXT_FIELD_NAME, "first")) + .build(), + defaultOp + ) ) .add( new BooleanQuery.Builder().add( - new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "foo"), new Term(TEXT_FIELD_NAME, "foo")), defaultOp) + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "foo")) + .addTerm(new Term(TEXT_FIELD_NAME, "foo")) + .build(), + defaultOp + ) ) .add( new BooleanClause( - new SynonymQuery(new Term(TEXT_FIELD_NAME, "bar"), new Term(TEXT_FIELD_NAME, "bar")), + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new 
Term(TEXT_FIELD_NAME, "bar")) + .addTerm(new Term(TEXT_FIELD_NAME, "bar")) + .build(), defaultOp ) ) @@ -532,7 +544,14 @@ public void testAnalyzerWildcardWithSynonyms() throws IOException { .build(), defaultOp ) - .add(new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "last"), new Term(TEXT_FIELD_NAME, "last")), defaultOp)) + .add( + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "last")) + .addTerm(new Term(TEXT_FIELD_NAME, "last")) + .build(), + defaultOp + ) + ) .build(); assertThat(query, equalTo(expectedQuery)); } diff --git a/server/src/test/java/org/opensearch/index/query/SpanContainingQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanContainingQueryBuilderTests.java index e43bd8e8d4f2b..33be236b45ab6 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanContainingQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanContainingQueryBuilderTests.java @@ -32,8 +32,8 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanContainingQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanContainingQuery; import org.opensearch.common.ParsingException; import org.opensearch.test.AbstractQueryTestCase; diff --git a/server/src/test/java/org/opensearch/index/query/SpanFirstQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanFirstQueryBuilderTests.java index d2c1fd8aaa021..bbae35478c6b5 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanFirstQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanFirstQueryBuilderTests.java @@ -32,8 +32,8 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanFirstQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanFirstQuery; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; import 
org.opensearch.common.xcontent.XContentBuilder; diff --git a/server/src/test/java/org/opensearch/index/query/SpanGapQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanGapQueryBuilderTests.java index 5e85a7eb5da01..3b4755d651c50 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanGapQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanGapQueryBuilderTests.java @@ -32,11 +32,10 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanBoostQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -74,9 +73,7 @@ protected SpanNearQueryBuilder doCreateTestQueryBuilder() { protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { assertThat( query, - either(instanceOf(SpanNearQuery.class)).or(instanceOf(SpanTermQuery.class)) - .or(instanceOf(SpanBoostQuery.class)) - .or(instanceOf(MatchAllQueryBuilder.class)) + either(instanceOf(SpanNearQuery.class)).or(instanceOf(SpanTermQuery.class)).or(instanceOf(MatchAllQueryBuilder.class)) ); if (query instanceof SpanNearQuery) { SpanNearQuery spanNearQuery = (SpanNearQuery) query; @@ -89,7 +86,7 @@ protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query quer if (spanQB instanceof SpanGapQueryBuilder) continue; assertThat(spanQuery, equalTo(spanQB.toQuery(context))); } - } else if (query instanceof SpanTermQuery || query instanceof SpanBoostQuery) { + } else if (query instanceof SpanTermQuery) { assertThat(queryBuilder.clauses().size(), equalTo(1)); assertThat(query, 
equalTo(queryBuilder.clauses().get(0).toQuery(context))); } diff --git a/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java index a17f9e8c9d921..011d05aef1214 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -39,6 +39,10 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexSearcher; @@ -47,10 +51,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopTermsRewrite; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; import org.opensearch.common.Strings; import org.opensearch.common.compress.CompressedXContent; @@ -184,6 +184,11 @@ public String fieldName() { } } + @Override + protected boolean supportsBoost() { + return false; + } + /** * test checks that we throw an {@link UnsupportedOperationException} if the query wrapped * by {@link SpanMultiTermQueryBuilder} does not generate a lucene {@link MultiTermQuery}. 
diff --git a/server/src/test/java/org/opensearch/index/query/SpanNearQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanNearQueryBuilderTests.java index ae2848f442e35..416e37db764b3 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanNearQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanNearQueryBuilderTests.java @@ -33,11 +33,10 @@ package org.opensearch.index.query; import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanBoostQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.common.ParsingException; import org.opensearch.test.AbstractQueryTestCase; @@ -66,7 +65,6 @@ protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query quer assertThat( query, either(instanceOf(SpanNearQuery.class)).or(instanceOf(SpanTermQuery.class)) - .or(instanceOf(SpanBoostQuery.class)) .or(instanceOf(SpanMatchNoDocsQuery.class)) .or(instanceOf(MatchAllQueryBuilder.class)) ); @@ -79,7 +77,7 @@ protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query quer for (SpanQuery spanQuery : spanNearQuery.getClauses()) { assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); } - } else if (query instanceof SpanTermQuery || query instanceof SpanBoostQuery) { + } else if (query instanceof SpanTermQuery) { assertThat(queryBuilder.clauses().size(), equalTo(1)); assertThat(query, equalTo(queryBuilder.clauses().get(0).toQuery(context))); } diff --git a/server/src/test/java/org/opensearch/index/query/SpanNotQueryBuilderTests.java 
b/server/src/test/java/org/opensearch/index/query/SpanNotQueryBuilderTests.java index 552539934027d..4222e42af9043 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanNotQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanNotQueryBuilderTests.java @@ -32,8 +32,8 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanNotQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanNotQuery; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; import org.opensearch.common.xcontent.XContentBuilder; diff --git a/server/src/test/java/org/opensearch/index/query/SpanOrQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanOrQueryBuilderTests.java index ec2da8ab33779..45764708efb46 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanOrQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanOrQueryBuilderTests.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParsingException; import org.opensearch.test.AbstractQueryTestCase; diff --git a/server/src/test/java/org/opensearch/index/query/SpanTermQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanTermQueryBuilderTests.java index ae0da8101a7d1..30f2143d74f1d 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanTermQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanTermQueryBuilderTests.java @@ -33,10 +33,10 @@ package org.opensearch.index.query; import com.fasterxml.jackson.core.io.JsonStringEncoder; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.index.Term; import 
org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.common.ParsingException; import org.opensearch.common.lucene.BytesRefs; import org.opensearch.index.mapper.MappedFieldType; diff --git a/server/src/test/java/org/opensearch/index/query/SpanWithinQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanWithinQueryBuilderTests.java index 2cb9d6ae5f91e..25fd137aac286 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanWithinQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanWithinQueryBuilderTests.java @@ -32,8 +32,8 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanWithinQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanWithinQuery; import org.opensearch.common.ParsingException; import org.opensearch.test.AbstractQueryTestCase; diff --git a/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java index c6cd667338303..3c39773108830 100644 --- a/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java @@ -42,7 +42,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.Term; -import org.apache.lucene.search.CoveringQuery; +import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/index/query/plugin/DummyQueryParserPlugin.java b/server/src/test/java/org/opensearch/index/query/plugin/DummyQueryParserPlugin.java index 64d3a8c682163..37766153efd4c 100644 --- 
a/server/src/test/java/org/opensearch/index/query/plugin/DummyQueryParserPlugin.java +++ b/server/src/test/java/org/opensearch/index/query/plugin/DummyQueryParserPlugin.java @@ -35,6 +35,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.opensearch.plugins.Plugin; @@ -74,5 +75,10 @@ public boolean equals(Object obj) { public int hashCode() { return classHash(); } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } } } diff --git a/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java b/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java index ef87a70b71594..a7f765fee23da 100644 --- a/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java +++ b/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java @@ -258,16 +258,15 @@ public void testMultiMatchCrossFieldsWithSynonyms() throws IOException { // check that synonym query is used for a single field Query parsedQuery = parser.parse(MultiMatchQueryBuilder.Type.CROSS_FIELDS, fieldNames, "dogs", null); - Term[] terms = new Term[2]; - terms[0] = new Term("name.first", "dog"); - terms[1] = new Term("name.first", "dogs"); - Query expectedQuery = new SynonymQuery(terms); + Query expectedQuery = new SynonymQuery.Builder("name.first").addTerm(new Term("name.first", "dog")) + .addTerm(new Term("name.first", "dogs")) + .build(); assertThat(parsedQuery, equalTo(expectedQuery)); // check that blended term query is used for multiple fields fieldNames.put("name.last", 1.0f); parsedQuery = parser.parse(MultiMatchQueryBuilder.Type.CROSS_FIELDS, fieldNames, "dogs", null); - terms = new Term[4]; + Term[] terms = new Term[4]; terms[0] = new Term("name.first", "dog"); terms[1] = new 
Term("name.first", "dogs"); terms[2] = new Term("name.last", "dog"); diff --git a/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java index 6ecc27c155d3d..ee0b99bdc102c 100644 --- a/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java +++ b/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java @@ -57,7 +57,6 @@ import org.apache.lucene.search.join.ToParentBlockJoinQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.TestUtil; -import org.opensearch.Version; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.common.lucene.search.Queries; import org.opensearch.common.settings.Settings; @@ -826,7 +825,7 @@ private static TopFieldDocs search( IndexSearcher searcher ) throws IOException { Query query = new BooleanQuery.Builder().add(queryBuilder.toQuery(queryShardContext), Occur.MUST) - .add(Queries.newNonNestedFilter(Version.CURRENT), Occur.FILTER) + .add(Queries.newNonNestedFilter(), Occur.FILTER) .build(); Sort sort = new Sort(sortBuilder.build(queryShardContext).field); return searcher.search(query, 10, sort); diff --git a/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java b/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java index ed29939163dbd..7f1f4ade53c50 100644 --- a/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java +++ b/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java @@ -74,8 +74,7 @@ public void testSameNormsAsBM25DiscountOverlaps() { private void doTestSameNormsAsBM25(boolean discountOverlaps) { ScriptedSimilarity sim1 = new ScriptedSimilarity("foobar", null, "foobaz", null, discountOverlaps); - BM25Similarity sim2 = new BM25Similarity(); - sim2.setDiscountOverlaps(discountOverlaps); + BM25Similarity sim2 = 
new BM25Similarity(discountOverlaps); for (int iter = 0; iter < 100; ++iter) { final int length = TestUtil.nextInt(random(), 1, 100); final int position = random().nextInt(length); diff --git a/server/src/test/java/org/opensearch/index/similarity/SimilarityServiceTests.java b/server/src/test/java/org/opensearch/index/similarity/SimilarityServiceTests.java index 4c183aae558bc..eb666f1206c26 100644 --- a/server/src/test/java/org/opensearch/index/similarity/SimilarityServiceTests.java +++ b/server/src/test/java/org/opensearch/index/similarity/SimilarityServiceTests.java @@ -32,11 +32,11 @@ package org.opensearch.index.similarity; import org.apache.lucene.index.FieldInvertState; +import org.apache.lucene.misc.search.similarity.LegacyBM25Similarity; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.similarities.BooleanSimilarity; import org.apache.lucene.search.similarities.Similarity; -import org.apache.lucene.search.similarity.LegacyBM25Similarity; import org.opensearch.LegacyESVersion; import org.opensearch.common.settings.Settings; import org.opensearch.index.IndexSettings; diff --git a/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java b/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java index 01850c3384e12..418b933558e63 100644 --- a/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java +++ b/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java @@ -32,6 +32,7 @@ package org.opensearch.index.similarity; +import org.apache.lucene.misc.search.similarity.LegacyBM25Similarity; import org.apache.lucene.search.similarities.AfterEffectL; import org.apache.lucene.search.similarities.BasicModelG; import org.apache.lucene.search.similarities.BooleanSimilarity; @@ -44,7 +45,6 @@ import org.apache.lucene.search.similarities.LMJelinekMercerSimilarity; import org.apache.lucene.search.similarities.LambdaTTF; 
import org.apache.lucene.search.similarities.NormalizationH2; -import org.apache.lucene.search.similarity.LegacyBM25Similarity; import org.opensearch.common.Strings; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Settings; diff --git a/server/src/test/java/org/opensearch/index/store/StoreTests.java b/server/src/test/java/org/opensearch/index/store/StoreTests.java index 0a0f011ee7953..53ba689fbe011 100644 --- a/server/src/test/java/org/opensearch/index/store/StoreTests.java +++ b/server/src/test/java/org/opensearch/index/store/StoreTests.java @@ -40,7 +40,6 @@ import org.apache.lucene.document.TextField; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.index.IndexNotFoundException; @@ -237,10 +236,10 @@ public void testChecksumCorrupted() throws IOException { BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024)); output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length); } - output.writeInt(CodecUtil.FOOTER_MAGIC); - output.writeInt(0); + CodecUtil.writeBEInt(output, CodecUtil.FOOTER_MAGIC); + CodecUtil.writeBEInt(output, 0); String checksum = Store.digestToString(output.getChecksum()); - output.writeLong(output.getChecksum() + 1); // write a wrong checksum to the file + CodecUtil.writeBELong(output, output.getChecksum() + 1); // write a wrong checksum to the file output.close(); IndexInput indexInput = dir.openInput("foo.bar", IOContext.DEFAULT); @@ -502,9 +501,7 @@ public void assertDeleteContent(Store store, Directory dir) throws IOException { public static void assertConsistent(Store store, Store.MetadataSnapshot metadata) throws IOException { for (String file : store.directory().listAll()) { - if 
(!IndexWriter.WRITE_LOCK_NAME.equals(file) - && !IndexFileNames.OLD_SEGMENTS_GEN.equals(file) - && file.startsWith("extra") == false) { + if (IndexWriter.WRITE_LOCK_NAME.equals(file) == false && file.startsWith("extra") == false) { assertTrue( file + " is not in the map: " + metadata.asMap().size() + " vs. " + store.directory().listAll().length, metadata.asMap().containsKey(file) diff --git a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java index 35fec28a1c798..f1a6ba84e8543 100644 --- a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java @@ -34,6 +34,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; @@ -43,6 +44,8 @@ import org.apache.lucene.mockfile.FilterFileChannel; import org.apache.lucene.mockfile.FilterFileSystemProvider; import org.apache.lucene.store.AlreadyClosedException; +import org.apache.lucene.store.ByteArrayDataOutput; +import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.LineFileDocs; import org.apache.lucene.util.LuceneTestCase; @@ -1401,7 +1404,8 @@ public void testTranslogWriter() throws IOException { final Set seenSeqNos = new HashSet<>(); boolean opsHaveValidSequenceNumbers = randomBoolean(); for (int i = 0; i < numOps; i++) { - BytesStreamOutput out = new BytesStreamOutput(4); + byte[] bytes = new byte[4]; + DataOutput out = EndiannessReverserUtil.wrapDataOutput(new ByteArrayDataOutput(bytes)); out.writeInt(i); long seqNo; do { @@ -1411,7 +1415,7 @@ public void testTranslogWriter() throws 
IOException { if (seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { seenSeqNos.add(seqNo); } - writer.add(ReleasableBytesReference.wrap(out.bytes()), seqNo); + writer.add(ReleasableBytesReference.wrap(new BytesArray(bytes)), seqNo); } assertThat(persistedSeqNos, empty()); writer.sync(); @@ -1433,9 +1437,10 @@ public void testTranslogWriter() throws IOException { assertThat(reader.getCheckpoint().minSeqNo, equalTo(minSeqNo)); assertThat(reader.getCheckpoint().maxSeqNo, equalTo(maxSeqNo)); - BytesStreamOutput out = new BytesStreamOutput(4); + byte[] bytes = new byte[4]; + DataOutput out = EndiannessReverserUtil.wrapDataOutput(new ByteArrayDataOutput(bytes)); out.writeInt(2048); - writer.add(ReleasableBytesReference.wrap(out.bytes()), randomNonNegativeLong()); + writer.add(ReleasableBytesReference.wrap(new BytesArray(bytes)), randomNonNegativeLong()); if (reader instanceof TranslogReader) { ByteBuffer buffer = ByteBuffer.allocate(4); @@ -1641,9 +1646,10 @@ ChannelFactory getChannelFactory() { ) { TranslogWriter writer = translog.getCurrent(); - BytesStreamOutput out = new BytesStreamOutput(4); + byte[] bytes = new byte[4]; + DataOutput out = EndiannessReverserUtil.wrapDataOutput(new ByteArrayDataOutput(new byte[4])); out.writeInt(1); - writer.add(ReleasableBytesReference.wrap(out.bytes()), 1); + writer.add(ReleasableBytesReference.wrap(new BytesArray(bytes)), 1); assertThat(persistedSeqNos, empty()); startBlocking.set(true); Thread thread = new Thread(() -> { @@ -1657,7 +1663,7 @@ ChannelFactory getChannelFactory() { writeStarted.await(); // Add will not block even though we are currently writing/syncing - writer.add(ReleasableBytesReference.wrap(out.bytes()), 2); + writer.add(ReleasableBytesReference.wrap(new BytesArray(bytes)), 2); blocker.countDown(); // Sync against so that both operations are written @@ -1672,10 +1678,10 @@ public void testCloseIntoReader() throws IOException { try (TranslogWriter writer = translog.createWriter(translog.currentFileGeneration() + 1)) 
{ final int numOps = randomIntBetween(8, 128); for (int i = 0; i < numOps; i++) { - final BytesStreamOutput out = new BytesStreamOutput(4); - out.reset(); + final byte[] bytes = new byte[4]; + final DataOutput out = EndiannessReverserUtil.wrapDataOutput(new ByteArrayDataOutput(bytes)); out.writeInt(i); - writer.add(ReleasableBytesReference.wrap(out.bytes()), randomNonNegativeLong()); + writer.add(ReleasableBytesReference.wrap(new BytesArray(bytes)), randomNonNegativeLong()); } writer.sync(); final Checkpoint writerCheckpoint = writer.getCheckpoint(); diff --git a/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java index 383c0277e1c27..24d8d51042548 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java @@ -36,7 +36,6 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.DocIdSetIterator; @@ -45,6 +44,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; @@ -58,7 +58,6 @@ import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; -import java.util.Set; public class IndicesQueryCacheTests extends OpenSearchTestCase { @@ -70,6 +69,11 @@ private static class DummyQuery extends Query { this.id = id; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public boolean equals(Object 
obj) { return sameClassAs(obj) && id == ((DummyQuery) obj).id; @@ -374,11 +378,6 @@ private static class DummyWeight extends Weight { this.weight = weight; } - @Override - public void extractTerms(Set terms) { - weight.extractTerms(terms); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return weight.explain(context, doc); diff --git a/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java index fa927a58a2de1..bc2ecc2e62fae 100644 --- a/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java @@ -130,7 +130,6 @@ private Settings loadFromClasspath(String path) throws IOException { .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - } public void testSimpleConfigurationJson() throws IOException { diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoveryStatusTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoveryStatusTests.java index c73b802720547..73caa611dbcdb 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoveryStatusTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoveryStatusTests.java @@ -31,6 +31,7 @@ package org.opensearch.indices.recovery; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.store.IndexOutput; import org.opensearch.common.util.set.Sets; @@ -65,10 +66,10 @@ public void testRenameTempFiles() throws IOException { indexShard.store() ) ) { - indexOutput.writeInt(1); + EndiannessReverserUtil.wrapDataOutput(indexOutput).writeInt(1); IndexOutput openIndexOutput = multiFileWriter.getOpenIndexOutput("foo.bar"); assertSame(openIndexOutput, 
indexOutput); - openIndexOutput.writeInt(1); + EndiannessReverserUtil.wrapDataOutput(indexOutput).writeInt(1); CodecUtil.writeFooter(indexOutput); } diff --git a/server/src/test/java/org/apache/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java similarity index 95% rename from server/src/test/java/org/apache/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java rename to server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java index 5ecd590f907a7..38f1c23bfa1f3 100644 --- a/server/src/test/java/org/apache/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java +++ b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java @@ -30,12 +30,15 @@ * GitHub history for details. */ -package org.apache.lucene.analysis.miscellaneous; +package org.opensearch.lucene.analysis.miscellaneous; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.miscellaneous.DeDuplicatingTokenFilter; +import org.apache.lucene.analysis.miscellaneous.DuplicateByteSequenceSpotter; +import org.apache.lucene.analysis.miscellaneous.DuplicateSequenceAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/apache/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java similarity index 95% rename from server/src/test/java/org/apache/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java rename to server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java index 
d73ef1f624504..c4601a9053f54 100644 --- a/server/src/test/java/org/apache/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java +++ b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java @@ -30,12 +30,13 @@ * GitHub history for details. */ -package org.apache.lucene.analysis.miscellaneous; +package org.opensearch.lucene.analysis.miscellaneous; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.miscellaneous.TruncateTokenFilter; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java b/server/src/test/java/org/opensearch/lucene/grouping/CollapsingTopDocsCollectorTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java rename to server/src/test/java/org/opensearch/lucene/grouping/CollapsingTopDocsCollectorTests.java index f5c98323d4c0e..514ff904e6ff3 100644 --- a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java +++ b/server/src/test/java/org/opensearch/lucene/grouping/CollapsingTopDocsCollectorTests.java @@ -29,7 +29,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.grouping; +package org.opensearch.lucene.grouping; import org.apache.lucene.document.Document; import org.apache.lucene.document.NumericDocValuesField; @@ -364,7 +364,7 @@ public SortField sortField(boolean multivalued) { if (multivalued) { return new SortedSetSortField("field", false); } else { - return new SortField("field", SortField.Type.STRING_VAL); + return new SortField("field", SortField.Type.STRING); } } }; @@ -435,7 +435,7 @@ public void testEmptySortedSegment() throws Exception { MappedFieldType fieldType = new MockFieldMapper.FakeFieldType("group"); - Sort sort = new Sort(new SortField("group", SortField.Type.STRING_VAL)); + Sort sort = new Sort(new SortField("group", SortField.Type.STRING)); final CollapsingTopDocsCollector collapsingCollector = CollapsingTopDocsCollector.createKeyword("group", fieldType, sort, 10); searcher.search(new MatchAllDocsQuery(), collapsingCollector); diff --git a/server/src/test/java/org/apache/lucene/index/ShuffleForcedMergePolicyTests.java b/server/src/test/java/org/opensearch/lucene/index/ShuffleForcedMergePolicyTests.java similarity index 89% rename from server/src/test/java/org/apache/lucene/index/ShuffleForcedMergePolicyTests.java rename to server/src/test/java/org/opensearch/lucene/index/ShuffleForcedMergePolicyTests.java index fd5b54aa685b6..fcce7819d6143 100644 --- a/server/src/test/java/org/apache/lucene/index/ShuffleForcedMergePolicyTests.java +++ b/server/src/test/java/org/opensearch/lucene/index/ShuffleForcedMergePolicyTests.java @@ -30,12 +30,21 @@ * GitHub history for details. 
*/ -package org.apache.lucene.index; +package org.opensearch.lucene.index; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.StringField; +import org.apache.lucene.index.BaseMergePolicyTestCase; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.MergePolicy; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.ShuffleForcedMergePolicy; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java b/server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java new file mode 100644 index 0000000000000..7f89176c302df --- /dev/null +++ b/server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java @@ -0,0 +1,121 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.opensearch.lucene.misc.search.similarity; + +import java.util.Random; + +import org.apache.lucene.misc.search.similarity.LegacyBM25Similarity; +import org.apache.lucene.search.similarities.BM25Similarity; +import org.apache.lucene.search.similarities.BaseSimilarityTestCase; +import org.apache.lucene.search.similarities.Similarity; + +@Deprecated +public class LegacyBM25SimilarityTests extends BaseSimilarityTestCase { + + public void testIllegalK1() { + IllegalArgumentException expected = expectThrows( + IllegalArgumentException.class, + () -> { new LegacyBM25Similarity(Float.POSITIVE_INFINITY, 0.75f); } + ); + assertTrue(expected.getMessage().contains("illegal k1 value")); + + expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(-1, 0.75f); }); + assertTrue(expected.getMessage().contains("illegal k1 value")); + + expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(Float.NaN, 0.75f); }); + assertTrue(expected.getMessage().contains("illegal k1 value")); + } + + public void testIllegalB() { + IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(1.2f, 2f); }); + assertTrue(expected.getMessage().contains("illegal b value")); + + expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(1.2f, -1f); }); + assertTrue(expected.getMessage().contains("illegal b value")); + + expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(1.2f, 
Float.POSITIVE_INFINITY); }); + assertTrue(expected.getMessage().contains("illegal b value")); + + expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(1.2f, Float.NaN); }); + assertTrue(expected.getMessage().contains("illegal b value")); + } + + public void testDefaults() { + LegacyBM25Similarity legacyBM25Similarity = new LegacyBM25Similarity(); + BM25Similarity bm25Similarity = new BM25Similarity(); + assertEquals(bm25Similarity.getB(), legacyBM25Similarity.getB(), 0f); + assertEquals(bm25Similarity.getK1(), legacyBM25Similarity.getK1(), 0f); + } + + public void testToString() { + LegacyBM25Similarity legacyBM25Similarity = new LegacyBM25Similarity(); + BM25Similarity bm25Similarity = new BM25Similarity(); + assertEquals(bm25Similarity.toString(), legacyBM25Similarity.toString()); + } + + @Override + protected Similarity getSimilarity(Random random) { + return new LegacyBM25Similarity(randomK1(random), randomB(random)); + } + + private static float randomK1(Random random) { + // term frequency normalization parameter k1 + switch (random.nextInt(4)) { + case 0: + // minimum value + return 0; + case 1: + // tiny value + return Float.MIN_VALUE; + case 2: + // maximum value + // upper bounds on individual term's score is 43.262806 * (k1 + 1) * boost + // we just limit the test to "reasonable" k1 values but don't enforce this anywhere. + return Integer.MAX_VALUE; + default: + // random value + return Integer.MAX_VALUE * random.nextFloat(); + } + } + + private static float randomB(Random random) { + // length normalization parameter b [0 .. 
1] + switch (random.nextInt(4)) { + case 0: + // minimum value + return 0; + case 1: + // tiny value + return Float.MIN_VALUE; + case 2: + // maximum value + return 1; + default: + // random value + return random.nextFloat(); + } + } +} diff --git a/server/src/test/java/org/apache/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java b/server/src/test/java/org/opensearch/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java rename to server/src/test/java/org/opensearch/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java index d20a6ad081a39..29a826037770f 100644 --- a/server/src/test/java/org/apache/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java +++ b/server/src/test/java/org/opensearch/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java @@ -29,7 +29,7 @@ * GitHub history for details. */ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; diff --git a/server/src/test/java/org/apache/lucene/queries/BinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/BinaryDocValuesRangeQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/BinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/BinaryDocValuesRangeQueryTests.java index f0096ff4220ae..70e3c7ca53995 100644 --- a/server/src/test/java/org/apache/lucene/queries/BinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/BinaryDocValuesRangeQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; diff --git a/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java similarity index 98% rename from server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java index 9fb482880c9de..f46a8bbca0d2a 100644 --- a/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. */ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; @@ -39,17 +39,17 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermStates; +import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.similarities.BM25Similarity; @@ -241,8 +241,7 @@ public void testExtractTerms() throws IOException { BlendedTermQuery blendedTermQuery = 
BlendedTermQuery.dismaxBlendedQuery(terms.toArray(new Term[0]), random().nextFloat()); Set extracted = new HashSet<>(); - IndexSearcher searcher = new IndexSearcher(new MultiReader()); - searcher.createWeight(searcher.rewrite(blendedTermQuery), ScoreMode.COMPLETE_NO_SCORES, 1f).extractTerms(extracted); + blendedTermQuery.visit(QueryVisitor.termCollector(extracted)); assertThat(extracted.size(), equalTo(terms.size())); assertThat(extracted, containsInAnyOrder(terms.toArray(new Term[0]))); } diff --git a/server/src/test/java/org/apache/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java index 498342d48d65e..f4d0c827ada38 100644 --- a/server/src/test/java/org/apache/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.opensearch.index.mapper.RangeType; diff --git a/server/src/test/java/org/apache/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java index 45fcf845e9f61..572c043442746 100644 --- a/server/src/test/java/org/apache/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. */ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.opensearch.index.mapper.RangeType; diff --git a/server/src/test/java/org/apache/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java similarity index 81% rename from server/src/test/java/org/apache/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java index 37b758abefe87..b321fc1ed7389 100644 --- a/server/src/test/java/org/apache/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java @@ -29,10 +29,9 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.document.InetAddressPoint; -import org.apache.lucene.util.FutureArrays; import org.opensearch.index.mapper.RangeType; import java.net.InetAddress; @@ -57,7 +56,7 @@ protected Range nextRange(int dimensions) throws Exception { byte[] bMin = InetAddressPoint.encode(min); InetAddress max = nextInetaddress(); byte[] bMax = InetAddressPoint.encode(max); - if (FutureArrays.compareUnsigned(bMin, 0, bMin.length, bMax, 0, bMin.length) > 0) { + if (Arrays.compareUnsigned(bMin, 0, bMin.length, bMax, 0, bMin.length) > 0) { return new IpRange(max, min); } return new IpRange(min, max); @@ -104,7 +103,7 @@ protected void setMin(int dim, Object val) { InetAddress v = (InetAddress) val; byte[] e = InetAddressPoint.encode(v); - if (FutureArrays.compareUnsigned(min, 0, e.length, e, 0, e.length) < 0) { + if (Arrays.compareUnsigned(min, 0, e.length, e, 0, e.length) < 0) { max = e; maxAddress = v; } else { @@ -124,7 +123,7 @@ protected void setMax(int dim, Object val) { InetAddress v = (InetAddress) val; byte[] e = InetAddressPoint.encode(v); - if (FutureArrays.compareUnsigned(max, 0, e.length, e, 0, e.length) > 0) { + if (Arrays.compareUnsigned(max, 0, e.length, e, 0, e.length) > 0) { min = e; minAddress = v; } else { @@ -136,22 +135,22 @@ protected void setMax(int dim, Object val) { @Override protected boolean isDisjoint(Range o) { IpRange other = (IpRange) o; - return FutureArrays.compareUnsigned(min, 0, min.length, other.max, 0, min.length) > 0 - || FutureArrays.compareUnsigned(max, 0, max.length, other.min, 0, max.length) < 0; + return Arrays.compareUnsigned(min, 0, min.length, other.max, 0, min.length) > 0 + || Arrays.compareUnsigned(max, 0, max.length, other.min, 0, max.length) < 0; } @Override protected boolean isWithin(Range o) { IpRange other = (IpRange) o; - return FutureArrays.compareUnsigned(min, 0, min.length, other.min, 0, min.length) >= 0 - && 
FutureArrays.compareUnsigned(max, 0, max.length, other.max, 0, max.length) <= 0; + return Arrays.compareUnsigned(min, 0, min.length, other.min, 0, min.length) >= 0 + && Arrays.compareUnsigned(max, 0, max.length, other.max, 0, max.length) <= 0; } @Override protected boolean contains(Range o) { IpRange other = (IpRange) o; - return FutureArrays.compareUnsigned(min, 0, min.length, other.min, 0, min.length) <= 0 - && FutureArrays.compareUnsigned(max, 0, max.length, other.max, 0, max.length) >= 0; + return Arrays.compareUnsigned(min, 0, min.length, other.min, 0, min.length) <= 0 + && Arrays.compareUnsigned(max, 0, max.length, other.max, 0, max.length) >= 0; } } diff --git a/server/src/test/java/org/apache/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java index bc5fc1b3df2f4..e4d3615277455 100644 --- a/server/src/test/java/org/apache/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.util.TestUtil; import org.opensearch.index.mapper.RangeType; diff --git a/server/src/test/java/org/apache/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java index 2dd49da45dc11..cd8457b828342 100644 --- a/server/src/test/java/org/apache/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. */ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.util.TestUtil; import org.opensearch.index.mapper.RangeType; diff --git a/server/src/test/java/org/apache/lucene/queries/MinDocQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java similarity index 98% rename from server/src/test/java/org/apache/lucene/queries/MinDocQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java index 0640935481ce4..d5c789ae0aa89 100644 --- a/server/src/test/java/org/apache/lucene/queries/MinDocQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; diff --git a/server/src/test/java/org/apache/lucene/queries/SearchAfterSortedDocQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/SearchAfterSortedDocQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/SearchAfterSortedDocQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/SearchAfterSortedDocQueryTests.java index 2faa01cc569c5..b4e035443cd82 100644 --- a/server/src/test/java/org/apache/lucene/queries/SearchAfterSortedDocQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/SearchAfterSortedDocQueryTests.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.document.Document; import org.apache.lucene.document.SortedDocValuesField; diff --git a/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java similarity index 93% rename from server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java index e8ac8f62be1c4..110a64e102ed4 100644 --- a/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; @@ -40,14 +40,15 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryUtils; import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java b/server/src/test/java/org/opensearch/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java similarity index 98% rename from server/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java rename to server/src/test/java/org/opensearch/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java index 8f1fa76facffc..d3706e9250271 100644 --- a/server/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java +++ b/server/src/test/java/org/opensearch/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java @@ -30,8 +30,9 @@ * GitHub history for details. 
*/ -package org.apache.lucene.search.uhighlight; +package org.opensearch.lucene.search.uhighlight; +import org.apache.lucene.search.uhighlight.BoundedBreakIteratorScanner; import org.opensearch.test.OpenSearchTestCase; import java.text.BreakIterator; diff --git a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomPassageFormatterTests.java b/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomPassageFormatterTests.java similarity index 95% rename from server/src/test/java/org/apache/lucene/search/uhighlight/CustomPassageFormatterTests.java rename to server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomPassageFormatterTests.java index 1d5695e7d54fa..10cdebaf69a36 100644 --- a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomPassageFormatterTests.java +++ b/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomPassageFormatterTests.java @@ -30,10 +30,13 @@ * GitHub history for details. */ -package org.apache.lucene.search.uhighlight; +package org.opensearch.lucene.search.uhighlight; import org.apache.lucene.search.highlight.DefaultEncoder; import org.apache.lucene.search.highlight.SimpleHTMLEncoder; +import org.apache.lucene.search.uhighlight.CustomPassageFormatter; +import org.apache.lucene.search.uhighlight.Passage; +import org.apache.lucene.search.uhighlight.Snippet; import org.apache.lucene.util.BytesRef; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java similarity index 82% rename from server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java rename to server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index 3265626c8a6e9..70a260837271c 100644 --- 
a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.apache.lucene.search.uhighlight; +package org.opensearch.lucene.search.uhighlight; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.custom.CustomAnalyzer; @@ -57,6 +57,11 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.highlight.DefaultEncoder; +import org.apache.lucene.search.uhighlight.BoundedBreakIteratorScanner; +import org.apache.lucene.search.uhighlight.CustomPassageFormatter; +import org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter; +import org.apache.lucene.search.uhighlight.Snippet; +import org.apache.lucene.search.uhighlight.UnifiedHighlighter; import org.apache.lucene.store.Directory; import org.opensearch.common.Strings; import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; @@ -79,49 +84,48 @@ private void assertHighlightOneDoc( int noMatchSize, String[] expectedPassages ) throws Exception { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(analyzer); - iwc.setMergePolicy(newTieredMergePolicy(random())); - RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); - FieldType ft = new FieldType(TextField.TYPE_STORED); - ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); - ft.freeze(); - Document doc = new Document(); - for (String input : inputs) { - Field field = new Field(fieldName, "", ft); - field.setStringValue(input); - doc.add(field); + try (Directory dir = newDirectory()) { + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); + iwc.setMergePolicy(newTieredMergePolicy(random())); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); + FieldType ft = new FieldType(TextField.TYPE_STORED); + 
ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); + ft.freeze(); + Document doc = new Document(); + for (String input : inputs) { + Field field = new Field(fieldName, "", ft); + field.setStringValue(input); + doc.add(field); + } + iw.addDocument(doc); + try (DirectoryReader reader = iw.getReader()) { + IndexSearcher searcher = newSearcher(reader); + iw.close(); + TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); + assertThat(topDocs.totalHits.value, equalTo(1L)); + String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR)); + CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter( + searcher, + analyzer, + UnifiedHighlighter.OffsetSource.ANALYSIS, + new CustomPassageFormatter("", "", new DefaultEncoder()), + locale, + breakIterator, + "index", + "text", + query, + noMatchSize, + expectedPassages.length, + name -> "text".equals(name), + Integer.MAX_VALUE + ); + final Snippet[] snippets = highlighter.highlightField(getOnlyLeafReader(reader), topDocs.scoreDocs[0].doc, () -> rawValue); + assertEquals(snippets.length, expectedPassages.length); + for (int i = 0; i < snippets.length; i++) { + assertEquals(snippets[i].getText(), expectedPassages[i]); + } + } } - iw.addDocument(doc); - DirectoryReader reader = iw.getReader(); - IndexSearcher searcher = newSearcher(reader); - iw.close(); - TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); - String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR)); - CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter( - searcher, - analyzer, - null, - new CustomPassageFormatter("", "", new DefaultEncoder()), - locale, - breakIterator, - "index", - "text", - query, - noMatchSize, - expectedPassages.length, - name -> "text".equals(name), - Integer.MAX_VALUE, - Integer.MAX_VALUE - ); - final Snippet[] snippets = 
highlighter.highlightField(getOnlyLeafReader(reader), topDocs.scoreDocs[0].doc, () -> rawValue); - assertEquals(snippets.length, expectedPassages.length); - for (int i = 0; i < snippets.length; i++) { - assertEquals(snippets[i].getText(), expectedPassages[i]); - } - reader.close(); - dir.close(); } public void testSimple() throws Exception { diff --git a/server/src/test/java/org/apache/lucene/util/CombinedBitSetTests.java b/server/src/test/java/org/opensearch/lucene/util/CombinedBitSetTests.java similarity index 95% rename from server/src/test/java/org/apache/lucene/util/CombinedBitSetTests.java rename to server/src/test/java/org/opensearch/lucene/util/CombinedBitSetTests.java index 0b0d4263fcae4..722ae1a13e15f 100644 --- a/server/src/test/java/org/apache/lucene/util/CombinedBitSetTests.java +++ b/server/src/test/java/org/opensearch/lucene/util/CombinedBitSetTests.java @@ -30,9 +30,13 @@ * GitHub history for details. */ -package org.apache.lucene.util; +package org.opensearch.lucene.util; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.util.BitSet; +import org.apache.lucene.util.CombinedBitSet; +import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.SparseFixedBitSet; import org.opensearch.test.OpenSearchTestCase; public class CombinedBitSetTests extends OpenSearchTestCase { diff --git a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java index bddc61211592e..572d2c322153f 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java @@ -648,6 +648,7 @@ public void testJarHellTransitiveMap() throws Exception { assertThat(deps, containsInAnyOrder(pluginJar.toUri().toURL(), dep1Jar.toUri().toURL(), dep2Jar.toUri().toURL())); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void 
testNonExtensibleDep() throws Exception { // This test opens a child classloader, reading a jar under the test temp // dir (a dummy plugin). Classloaders are closed by GC, so when test teardown @@ -790,6 +791,7 @@ public FakePlugin() { } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testExistingMandatoryInstalledPlugin() throws IOException { // This test opens a child classloader, reading a jar under the test temp // dir (a dummy plugin). Classloaders are closed by GC, so when test teardown @@ -823,6 +825,7 @@ public void testExistingMandatoryInstalledPlugin() throws IOException { newPluginsService(settings); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testPluginFromParentClassLoader() throws IOException { final Path pathHome = createTempDir(); final Path plugins = pathHome.resolve("plugins"); @@ -860,6 +863,7 @@ public void testPluginFromParentClassLoader() throws IOException { ); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testPluginLoadFailure() throws IOException { final Path pathHome = createTempDir(); final Path plugins = pathHome.resolve("plugins"); diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index f81bd012bfa63..07c6e927c2030 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -313,11 +313,11 @@ public void testUnmappedFieldWithHistogram() throws Exception { final String mappedFieldName = "price"; dataset.addAll( Arrays.asList( - createDocument(mappedFieldName, 103L), - createDocument(mappedFieldName, 51L), - 
createDocument(mappedFieldName, 56L), - createDocument(mappedFieldName, 105L), - createDocument(mappedFieldName, 25L) + createDocument(mappedFieldName, 103), + createDocument(mappedFieldName, 51), + createDocument(mappedFieldName, 56), + createDocument(mappedFieldName, 105), + createDocument(mappedFieldName, 25) ) ); @@ -1908,10 +1908,10 @@ public void testWithHistogramBucketMissing() throws IOException { final List>> dataset = new ArrayList<>(); dataset.addAll( Arrays.asList( - createDocument("price", 50L, "long", 1L), - createDocument("price", 60L, "long", 2L), - createDocument("price", 70L, "long", 3L), - createDocument("price", 62L, "long", 4L), + createDocument("price", 50, "long", 1L), + createDocument("price", 60, "long", 2L), + createDocument("price", 70, "long", 3L), + createDocument("price", 62, "long", 4L), createDocument("long", 5L) ) ); diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 99cba603974d7..b394063033637 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -50,7 +50,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.opensearch.Version; import org.opensearch.common.CheckedConsumer; import org.opensearch.common.collect.Tuple; import org.opensearch.common.lucene.search.Queries; @@ -91,7 +90,6 @@ import org.opensearch.search.aggregations.pipeline.InternalSimpleValue; import org.opensearch.search.aggregations.support.AggregationInspectionHelper; import org.opensearch.search.aggregations.support.ValueType; -import org.opensearch.test.VersionUtils; import java.io.IOException; import java.util.ArrayList; @@ -395,7 +393,7 @@ public void 
testResetRootDocId() throws Exception { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.add(Queries.newNonNestedFilter(VersionUtils.randomVersion(random())), BooleanClause.Occur.MUST); + bq.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST); bq.add(new TermQuery(new Term(IdFieldMapper.NAME, Uid.encodeId("2"))), BooleanClause.Occur.MUST_NOT); InternalNested nested = searchAndReduce( @@ -687,7 +685,7 @@ public void testPreGetChildLeafCollectors() throws IOException { Filter filter = searchAndReduce( newSearcher(indexReader, false, true), - Queries.newNonNestedFilter(Version.CURRENT), + Queries.newNonNestedFilter(), filterAggregationBuilder, fieldType1, fieldType2 diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java index 8623d26be5726..acff8305938e8 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java @@ -57,7 +57,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; -import org.apache.lucene.util.FutureArrays; import org.opensearch.common.CheckedConsumer; import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; @@ -408,7 +407,7 @@ public void visit(int docID, byte[] packedValue) { @Override public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - if (FutureArrays.equals(maxPackedValue, 0, numBytes, maxValue, 0, numBytes)) { + if (Arrays.equals(maxPackedValue, 0, numBytes, maxValue, 0, numBytes)) { return PointValues.Relation.CELL_CROSSES_QUERY; } return PointValues.Relation.CELL_OUTSIDE_QUERY; diff --git 
a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java index eb87888530e94..de0a31b9dc04b 100644 --- a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java @@ -58,6 +58,7 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; @@ -419,11 +420,6 @@ private static class CreateScorerOnceWeight extends Weight { this.weight = weight; } - @Override - public void extractTerms(Set terms) { - weight.extractTerms(terms); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return weight.explain(context, doc); @@ -483,5 +479,10 @@ public boolean equals(Object obj) { public int hashCode() { return 31 * classHash() + query.hashCode(); } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } } } diff --git a/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java b/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java index cc7200bbf78c4..7deb6845af607 100644 --- a/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java +++ b/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java @@ -36,6 +36,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.StoredFieldVisitor; +import org.apache.lucene.index.VectorSimilarityFunction; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; import 
org.opensearch.test.OpenSearchTestCase; @@ -79,6 +80,8 @@ public void setUp() throws Exception { 0, 0, 0, + 0, + VectorSimilarityFunction.EUCLIDEAN, false ); diff --git a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java index 588d63bffb3bd..30bb29dbf2aa8 100644 --- a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java @@ -49,6 +49,7 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.RandomApproximationQuery; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; @@ -72,7 +73,6 @@ import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; @@ -202,7 +202,9 @@ public void testUseIndexStats() throws IOException { Query query = new TermQuery(new Term("foo", "bar")); searcher.count(query); // will use index stats List results = profiler.getTree(); - assertEquals(0, results.size()); + assertEquals(1, results.size()); + ProfileResult result = results.get(0); + assertEquals(0, (long) result.getTimeBreakdown().get("build_scorer_count")); long rewriteTime = profiler.getRewriteTime(); assertThat(rewriteTime, greaterThan(0L)); @@ -255,6 +257,11 @@ public String toString(String field) { return getClass().getSimpleName(); } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public boolean equals(Object obj) { return this == obj; @@ -268,10 +275,6 @@ public int hashCode() { @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, 
float boost) throws IOException { return new Weight(this) { - @Override - public void extractTerms(Set terms) { - throw new UnsupportedOperationException(); - } @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { diff --git a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java index 07650d3c2a3e2..3f7761f3f18a0 100644 --- a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java @@ -50,7 +50,9 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; -import org.apache.lucene.queries.MinDocQuery; +import org.opensearch.lucene.queries.MinDocQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Collector; @@ -77,8 +79,6 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.ScoreMode; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; @@ -722,7 +722,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.setTask(task); searchContext.setSize(10); QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, false); } @@ -735,7 +734,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.setTask(task); searchContext.setSize(10); 
QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, true); } @@ -748,7 +746,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.setTask(task); searchContext.setSize(10); QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, false); } @@ -761,7 +758,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.setTask(task); searchContext.setSize(10); QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, true); } @@ -775,7 +771,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.from(5); searchContext.setSize(0); QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, false); } @@ -803,7 +798,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.setTask(task); searchContext.setSize(10); QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); final TopDocs topDocs = searchContext.queryResult().topDocs().topDocs; long topValue = (long) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]; assertThat(topValue, greaterThan(afterValue)); diff --git a/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java index e53a67ebc7708..070855481966f 100644 --- a/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java +++ 
b/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java @@ -37,7 +37,6 @@ import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -45,6 +44,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; +import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.AssertingIndexSearcher; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.SortField; diff --git a/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java index 1576a95a9a411..80c573f3cc9ae 100644 --- a/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java @@ -32,8 +32,8 @@ package org.opensearch.indices.analysis; -import org.apache.lucene.analysis.util.TokenFilterFactory; -import org.apache.lucene.analysis.util.TokenizerFactory; +import org.apache.lucene.analysis.TokenFilterFactory; +import org.apache.lucene.analysis.TokenizerFactory; import org.opensearch.common.collect.MapBuilder; import org.opensearch.index.analysis.HunspellTokenFilterFactory; import org.opensearch.index.analysis.ShingleTokenFilterFactory; @@ -134,6 +134,7 @@ public abstract class AnalysisFactoryTestCase extends OpenSearchTestCase { .put("ngram", MovedToAnalysisCommon.class) .put("norwegianlightstem", MovedToAnalysisCommon.class) .put("norwegianminimalstem", MovedToAnalysisCommon.class) + .put("norwegiannormalization", MovedToAnalysisCommon.class) 
.put("patterncapturegroup", MovedToAnalysisCommon.class) .put("patternreplace", MovedToAnalysisCommon.class) .put("persiannormalization", MovedToAnalysisCommon.class) @@ -155,8 +156,11 @@ public abstract class AnalysisFactoryTestCase extends OpenSearchTestCase { .put("stemmeroverride", MovedToAnalysisCommon.class) .put("stop", StopTokenFilterFactory.class) .put("swedishlightstem", MovedToAnalysisCommon.class) + .put("swedishminimalstem", MovedToAnalysisCommon.class) .put("synonym", MovedToAnalysisCommon.class) .put("synonymgraph", MovedToAnalysisCommon.class) + .put("telugunormalization", MovedToAnalysisCommon.class) + .put("telugustem", MovedToAnalysisCommon.class) .put("trim", MovedToAnalysisCommon.class) .put("truncate", MovedToAnalysisCommon.class) .put("turkishlowercase", MovedToAnalysisCommon.class) @@ -210,10 +214,9 @@ public abstract class AnalysisFactoryTestCase extends OpenSearchTestCase { .put("delimitedboost", Void.class) // LUCENE-9574: test flags on tokens vs a bitmask and drops tokens that have all specified flags .put("dropifflagged", Void.class) + .put("japanesecompletion", Void.class) // LUCENE-9575: recognize arbitrary patterns that include punctuation .put("patterntyping", Void.class) - .put("telugustem", Void.class) - .put("telugunormalization", Void.class) .immutableMap(); static final Map> KNOWN_CHARFILTERS = new MapBuilder>() @@ -291,7 +294,7 @@ public Map> getPreConfiguredCharFilters() { public void testTokenizers() { Set missing = new TreeSet(); missing.addAll( - org.apache.lucene.analysis.util.TokenizerFactory.availableTokenizers() + org.apache.lucene.analysis.TokenizerFactory.availableTokenizers() .stream() .map(key -> key.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()) @@ -303,7 +306,7 @@ public void testTokenizers() { public void testCharFilters() { Set missing = new TreeSet(); missing.addAll( - org.apache.lucene.analysis.util.CharFilterFactory.availableCharFilters() + 
org.apache.lucene.analysis.CharFilterFactory.availableCharFilters() .stream() .map(key -> key.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()) @@ -315,7 +318,7 @@ public void testCharFilters() { public void testTokenFilters() { Set missing = new TreeSet(); missing.addAll( - org.apache.lucene.analysis.util.TokenFilterFactory.availableTokenFilters() + org.apache.lucene.analysis.TokenFilterFactory.availableTokenFilters() .stream() .map(key -> key.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()) diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java index cbeefa7349e16..241ae1170817a 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java @@ -34,7 +34,6 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -47,6 +46,7 @@ import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.AssertingIndexSearcher; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; diff --git a/test/framework/src/main/java/org/opensearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/opensearch/test/AbstractQueryTestCase.java index 97f0dde027d6b..7ce7903296a5a 100644 --- a/test/framework/src/main/java/org/opensearch/test/AbstractQueryTestCase.java +++ 
b/test/framework/src/main/java/org/opensearch/test/AbstractQueryTestCase.java @@ -38,7 +38,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanBoostQuery; import org.opensearch.OpenSearchParseException; import org.opensearch.Version; import org.opensearch.action.support.PlainActionFuture; @@ -545,15 +544,8 @@ private void assertLuceneQuery(QB queryBuilder, Query query, QueryShardContext c } if (query != null) { if (queryBuilder.boost() != AbstractQueryBuilder.DEFAULT_BOOST) { - assertThat( - query, - either(instanceOf(BoostQuery.class)).or(instanceOf(SpanBoostQuery.class)).or(instanceOf(MatchNoDocsQuery.class)) - ); - if (query instanceof SpanBoostQuery) { - SpanBoostQuery spanBoostQuery = (SpanBoostQuery) query; - assertThat(spanBoostQuery.getBoost(), equalTo(queryBuilder.boost())); - query = spanBoostQuery.getQuery(); - } else if (query instanceof BoostQuery) { + assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(MatchNoDocsQuery.class))); + if (query instanceof BoostQuery) { BoostQuery boostQuery = (BoostQuery) query; if (boostQuery.getQuery() instanceof MatchNoDocsQuery == false) { assertThat(boostQuery.getBoost(), equalTo(queryBuilder.boost())); diff --git a/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java index e0007e224591f..1bcde48900364 100644 --- a/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java @@ -100,7 +100,7 @@ public static void corruptFile(Random random, Path... 
files) throws IOException input.seek(input.length() - CodecUtil.footerLength()); checksumAfterCorruption = input.getChecksum(); input.seek(input.length() - 8); - actualChecksumAfterCorruption = input.readLong(); + actualChecksumAfterCorruption = CodecUtil.readBELong(input); } // we need to add assumptions here that the checksums actually really don't match there is a small chance to get collisions // in the checksum which is ok though.... diff --git a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java index c37eb68a42836..421c022b38e9d 100644 --- a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java +++ b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java @@ -32,7 +32,6 @@ package org.opensearch.test.hamcrest; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TotalHits; import org.opensearch.OpenSearchException; @@ -564,14 +563,6 @@ public static T assertBooleanSubQuery(Query query, Class su return subqueryType.cast(q.clauses().get(i).getQuery()); } - public static T assertDisjunctionSubQuery(Query query, Class subqueryType, int i) { - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery q = (DisjunctionMaxQuery) query; - assertThat(q.getDisjuncts().size(), greaterThan(i)); - assertThat(q.getDisjuncts().get(i), instanceOf(subqueryType)); - return subqueryType.cast(q.getDisjuncts().get(i)); - } - /** * Run the request from a given builder and check that it throws an exception of the right type */ From 10b9986e1209da2aeec69181eea87702c01dfdfa Mon Sep 17 00:00:00 2001 From: Rishikesh Pasham <62345295+Rishikesh1159@users.noreply.github.com> Date: Tue, 15 Mar 2022 21:01:28 +0000 Subject: [PATCH 12/12] Override Default Distribution Download Url 
with Custom Distribution Url when it is passed from Plugin (#2420) * Override default Distribution Download URL with custom Distribution URL Signed-off-by: Rishikesh1159 * Accidently made commit to main branch, this revives it.Override default Distribution Download URL with custom Distribution URL Signed-off-by: Rishikesh1159 * Override Default DistributionDownloadUrl with customDistribution Url passed from Plugins Signed-off-by: Rishikesh1159 --- DEVELOPER_GUIDE.md | 12 +++++ .../gradle/DistributionDownloadPlugin.java | 25 +++++---- .../DistributionDownloadPluginTests.java | 54 +++++++++++++++++++ 3 files changed, 82 insertions(+), 9 deletions(-) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 58444441e3258..9b1bc933eb1e3 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -33,6 +33,8 @@ - [runtimeOnly](#runtimeonly) - [compileOnly](#compileonly) - [testImplementation](#testimplementation) + - [Gradle Plugins](#gradle-plugins) + - [Distribution Download Plugin](#distribution-download-plugin) - [Misc](#misc) - [git-secrets](#git-secrets) - [Installation](#installation) @@ -361,6 +363,16 @@ somehow. OpenSearch plugins use this configuration to include dependencies that Code that is on the classpath for compiling tests that are part of this project but not production code. The canonical example of this is `junit`. +### Gradle Plugins + +#### Distribution Download Plugin + +The Distribution Download plugin downloads the latest version of OpenSearch by default, and supports overriding this behavior by setting `customDistributionUrl`. 
+``` +./gradlew integTest -PcustomDistributionUrl="https://ci.opensearch.org/ci/dbc/bundle-build/1.2.0/1127/linux/x64/dist/opensearch-1.2.0-linux-x64.tar.gz" +``` + + ## Misc ### git-secrets diff --git a/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java index 843a7f7d2716d..fccdc49ef6fc9 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java @@ -195,15 +195,22 @@ private static void setupDownloadServiceRepo(Project project) { if (project.getRepositories().findByName(DOWNLOAD_REPO_NAME) != null) { return; } - addIvyRepo( - project, - DOWNLOAD_REPO_NAME, - "https://artifacts.opensearch.org", - FAKE_IVY_GROUP, - "/releases" + RELEASE_PATTERN_LAYOUT, - "/release-candidates" + RELEASE_PATTERN_LAYOUT - ); - addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://artifacts.opensearch.org", FAKE_SNAPSHOT_IVY_GROUP, SNAPSHOT_PATTERN_LAYOUT); + Object customDistributionUrl = project.findProperty("customDistributionUrl"); + // checks if custom Distribution Url has been passed by user from plugins + if (customDistributionUrl != null) { + addIvyRepo(project, DOWNLOAD_REPO_NAME, customDistributionUrl.toString(), FAKE_IVY_GROUP, ""); + addIvyRepo(project, SNAPSHOT_REPO_NAME, customDistributionUrl.toString(), FAKE_SNAPSHOT_IVY_GROUP, ""); + } else { + addIvyRepo( + project, + DOWNLOAD_REPO_NAME, + "https://artifacts.opensearch.org", + FAKE_IVY_GROUP, + "/releases" + RELEASE_PATTERN_LAYOUT, + "/release-candidates" + RELEASE_PATTERN_LAYOUT + ); + addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://artifacts.opensearch.org", FAKE_SNAPSHOT_IVY_GROUP, SNAPSHOT_PATTERN_LAYOUT); + } addIvyRepo2(project, DOWNLOAD_REPO_NAME_ES, "https://artifacts-no-kpi.elastic.co", FAKE_IVY_GROUP_ES); addIvyRepo2(project, SNAPSHOT_REPO_NAME_ES, "https://snapshots-no-kpi.elastic.co", 
FAKE_SNAPSHOT_IVY_GROUP_ES); diff --git a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java index 98feb3ef2ac93..446c94acc7ad4 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java @@ -32,6 +32,7 @@ package org.opensearch.gradle; +import org.gradle.api.internal.artifacts.repositories.DefaultIvyArtifactRepository; import org.opensearch.gradle.OpenSearchDistribution.Platform; import org.opensearch.gradle.OpenSearchDistribution.Type; import org.opensearch.gradle.info.BuildParams; @@ -79,6 +80,59 @@ public void testVersionDefault() { assertEquals(distro.getVersion(), VersionProperties.getOpenSearch()); } + public void testCustomDistributionUrlWithUrl() { + Project project = ProjectBuilder.builder().build(); + String customUrl = "https://artifacts.opensearch.org/custom"; + project.getExtensions().getExtraProperties().set("customDistributionUrl", customUrl); + DistributionDownloadPlugin plugin = new DistributionDownloadPlugin(); + plugin.apply(project); + assertEquals(4, project.getRepositories().size()); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads")).getUrl().toString(), + customUrl + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-snapshots")).getUrl().toString(), + customUrl + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-downloads")).getUrl().toString(), + "https://artifacts-no-kpi.elastic.co" + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-snapshots")).getUrl().toString(), + "https://snapshots-no-kpi.elastic.co" + ); + + } + + public void testCustomDistributionUrlWithoutUrl() { + Project project = 
ProjectBuilder.builder().build(); + DistributionDownloadPlugin plugin = new DistributionDownloadPlugin(); + plugin.apply(project); + assertEquals(5, project.getRepositories().size()); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads")).getUrl().toString(), + "https://artifacts.opensearch.org" + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads2")).getUrl().toString(), + "https://artifacts.opensearch.org" + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-snapshots")).getUrl().toString(), + "https://artifacts.opensearch.org" + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-downloads")).getUrl().toString(), + "https://artifacts-no-kpi.elastic.co" + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-snapshots")).getUrl().toString(), + "https://snapshots-no-kpi.elastic.co" + ); + } + public void testBadVersionFormat() { assertDistroError( createProject(null, false),