diff --git a/TESTING.asciidoc b/TESTING.asciidoc index ea3fecdbe27de..d0e1e0f50528c 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -352,6 +352,7 @@ These are the linux flavors the Vagrantfile currently supports: * centos-6 * centos-7 * fedora-25 +* fedora-26 * oel-6 aka Oracle Enterprise Linux 6 * oel-7 aka Oracle Enterprise Linux 7 * sles-12 diff --git a/Vagrantfile b/Vagrantfile index 830a72322cdbb..487594bba8a1a 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -64,6 +64,10 @@ Vagrant.configure(2) do |config| config.vm.box = "elastic/fedora-25-x86_64" dnf_common config end + config.vm.define "fedora-26" do |config| + config.vm.box = "elastic/fedora-26-x86_64" + dnf_common config + end config.vm.define "opensuse-42" do |config| config.vm.box = "elastic/opensuse-42-x86_64" opensuse_common config diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 73115aab88fd3..727996ab7b049 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -92,7 +92,7 @@ dependencies { compile 'com.netflix.nebula:gradle-info-plugin:3.0.3' compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r' compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE.... - compile 'de.thetaphi:forbiddenapis:2.3' + compile 'de.thetaphi:forbiddenapis:2.4.1' compile 'org.apache.rat:apache-rat:0.11' compile "org.elasticsearch:jna:4.4.0-1" } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index 17343cd5cacd9..8c94d48fcc43a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -20,6 +20,7 @@ class VagrantTestPlugin implements Plugin { 'debian-8', 'debian-9', 'fedora-25', + 'fedora-26', 'oel-6', 'oel-7', 'opensuse-42', diff --git a/buildSrc/version.properties b/buildSrc/version.properties index f2a953753a506..7e34dec9e4bf8 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,6 +1,6 @@ # When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy elasticsearch = 7.0.0-alpha1 -lucene = 7.0.0 +lucene = 7.1.0-snapshot-f33ed4ba12a # optional dependencies spatial4j = 0.6 diff --git a/core/licenses/lucene-NOTICE.txt b/core/licenses/lucene-NOTICE.txt index ecf08201a5ee6..1a1d51572432a 100644 --- a/core/licenses/lucene-NOTICE.txt +++ b/core/licenses/lucene-NOTICE.txt @@ -54,13 +54,14 @@ The KStem stemmer in was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst) under the BSD-license. -The Arabic,Persian,Romanian,Bulgarian, and Hindi analyzers (common) come with a default +The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default stopword list that is BSD-licensed created by Jacques Savoy.
These files reside in: analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt, -analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt +analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt See http://members.unine.ch/jacques.savoy/clef/index.html. The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers diff --git a/core/licenses/lucene-analyzers-common-7.0.0.jar.sha1 b/core/licenses/lucene-analyzers-common-7.0.0.jar.sha1 deleted file mode 100644 index e630261521bd4..0000000000000 --- a/core/licenses/lucene-analyzers-common-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -91f9e9717d7cf558927bbada26a651238cacb228 \ No newline at end of file diff --git a/core/licenses/lucene-analyzers-common-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-analyzers-common-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..6863607ab7906 --- /dev/null +++ b/core/licenses/lucene-analyzers-common-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +a59ac3bdd17becc848f319fb77994060661c2c71 \ No newline at end of file diff --git a/core/licenses/lucene-backward-codecs-7.0.0.jar.sha1 b/core/licenses/lucene-backward-codecs-7.0.0.jar.sha1 deleted file mode 100644 index 68a0b9e850167..0000000000000 --- a/core/licenses/lucene-backward-codecs-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3f03d0b56835bf8fc53ce78c3114a10df355985f \ No newline at end of file diff --git a/core/licenses/lucene-backward-codecs-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-backward-codecs-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..2bc2287cf2d57 --- /dev/null +++ b/core/licenses/lucene-backward-codecs-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +47f560086db8683b5be26911fae3721d8b0da465 \ No newline at end of file diff --git a/core/licenses/lucene-core-7.0.0.jar.sha1 b/core/licenses/lucene-core-7.0.0.jar.sha1 deleted file mode 100644 index 31b29b60fe229..0000000000000 --- a/core/licenses/lucene-core-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -59a0bf5b97304ffbe577220779b4992020d49f7e \ No newline at end of file diff --git a/core/licenses/lucene-core-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-core-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..a1079a6df66eb --- /dev/null +++ b/core/licenses/lucene-core-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +17bd8e886ac2e763c27a507e697f78e43103afd3 \ No newline at end of file diff --git a/core/licenses/lucene-grouping-7.0.0.jar.sha1 b/core/licenses/lucene-grouping-7.0.0.jar.sha1 deleted file mode 100644 index cb0826e73a562..0000000000000 --- a/core/licenses/lucene-grouping-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -59205eb7e0d3a33ad778b60f82ca3b17e24ac6b7 \ No newline at end of file diff --git a/core/licenses/lucene-grouping-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-grouping-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..036e7d5b1563d --- /dev/null +++ b/core/licenses/lucene-grouping-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +bb7d5f5f6dd0bada3991828b8687a35c90de76ca \ No newline at end of file diff --git 
a/core/licenses/lucene-highlighter-7.0.0.jar.sha1 b/core/licenses/lucene-highlighter-7.0.0.jar.sha1 deleted file mode 100644 index ab0a5a570d1c7..0000000000000 --- a/core/licenses/lucene-highlighter-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7ce11d6b34bd43563d5d63314088799879612692 \ No newline at end of file diff --git a/core/licenses/lucene-highlighter-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-highlighter-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..28e24b897f416 --- /dev/null +++ b/core/licenses/lucene-highlighter-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +f024368b33bfb7c1589aaf424992e474c4e3be38 \ No newline at end of file diff --git a/core/licenses/lucene-join-7.0.0.jar.sha1 b/core/licenses/lucene-join-7.0.0.jar.sha1 deleted file mode 100644 index a537f656f6497..0000000000000 --- a/core/licenses/lucene-join-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4706ea3c072dea6ba6540e0c8fce56fb5704fb16 \ No newline at end of file diff --git a/core/licenses/lucene-join-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-join-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..684763a1fa6a7 --- /dev/null +++ b/core/licenses/lucene-join-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +7b525cb2e2c8403543fefc09b972c78b86d2f0da \ No newline at end of file diff --git a/core/licenses/lucene-memory-7.0.0.jar.sha1 b/core/licenses/lucene-memory-7.0.0.jar.sha1 deleted file mode 100644 index 049af9157b2e9..0000000000000 --- a/core/licenses/lucene-memory-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dacd0a3717857954d83b47e46d293d1909368287 \ No newline at end of file diff --git a/core/licenses/lucene-memory-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-memory-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..47d82587ef0e1 --- /dev/null +++ b/core/licenses/lucene-memory-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +61cc3ced15fa80d8f97affe0c8df9818eeb8af49 \ No newline at end of file diff --git a/core/licenses/lucene-misc-7.0.0.jar.sha1 b/core/licenses/lucene-misc-7.0.0.jar.sha1 deleted file mode 100644 index 6a96c204e125f..0000000000000 --- a/core/licenses/lucene-misc-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -788bf81b746d8c445ba8abae09083b80dc571422 \ No newline at end of file diff --git a/core/licenses/lucene-misc-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-misc-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..adfb48c3cbd46 --- /dev/null +++ b/core/licenses/lucene-misc-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +03a71b5875d25576c9f8992822db65fb181f4328 \ No newline at end of file diff --git a/core/licenses/lucene-queries-7.0.0.jar.sha1 b/core/licenses/lucene-queries-7.0.0.jar.sha1 deleted file mode 100644 index 28ab8d34b6a01..0000000000000 --- a/core/licenses/lucene-queries-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c6168c23fd1bec9b57077693361b014b44000117 \ No newline at end of file diff --git a/core/licenses/lucene-queries-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-queries-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..c1f9c01c22123 --- /dev/null +++ b/core/licenses/lucene-queries-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +9c07c15b2c6f8bd3d75e0f53fff5631f012bff98 \ No newline at end of file diff --git a/core/licenses/lucene-queryparser-7.0.0.jar.sha1 b/core/licenses/lucene-queryparser-7.0.0.jar.sha1 deleted file mode 100644 index 3eb91931dd08f..0000000000000 --- 
a/core/licenses/lucene-queryparser-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4d007674d657274ada4f1fb29b87507aecdbcc5b \ No newline at end of file diff --git a/core/licenses/lucene-queryparser-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-queryparser-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..380eb78880477 --- /dev/null +++ b/core/licenses/lucene-queryparser-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +e0a7815981d096d96e7dc41b1c063cd78c91132d \ No newline at end of file diff --git a/core/licenses/lucene-sandbox-7.0.0.jar.sha1 b/core/licenses/lucene-sandbox-7.0.0.jar.sha1 deleted file mode 100644 index e498ebfa2b0ce..0000000000000 --- a/core/licenses/lucene-sandbox-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d3b84a7eccc77a28be0b7f30fa40de22d89ea028 \ No newline at end of file diff --git a/core/licenses/lucene-sandbox-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-sandbox-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..5348255939923 --- /dev/null +++ b/core/licenses/lucene-sandbox-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +1ea14867a6bc545fb2e09dd1f31b48523cdbc040 \ No newline at end of file diff --git a/core/licenses/lucene-spatial-7.0.0.jar.sha1 b/core/licenses/lucene-spatial-7.0.0.jar.sha1 deleted file mode 100644 index 86d66c7f62e92..0000000000000 --- a/core/licenses/lucene-spatial-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d947e80f5214b293bba319a625eb6971941846ac \ No newline at end of file diff --git a/core/licenses/lucene-spatial-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-spatial-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..18afbd6349384 --- /dev/null +++ b/core/licenses/lucene-spatial-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +58ce824ebc6126e37ff232c96a561a659377a873 \ No newline at end of file diff --git a/core/licenses/lucene-spatial-extras-7.0.0.jar.sha1 b/core/licenses/lucene-spatial-extras-7.0.0.jar.sha1 deleted file mode 100644 index 76101fe60c749..0000000000000 --- a/core/licenses/lucene-spatial-extras-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ac15765c81ab4151e7a6c56ba544f3cb9860d183 \ No newline at end of file diff --git a/core/licenses/lucene-spatial-extras-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-spatial-extras-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..b07b6ca6d9c3e --- /dev/null +++ b/core/licenses/lucene-spatial-extras-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +3fcd89a8cda5ee2049c189b06b5e30258b1aa198 \ No newline at end of file diff --git a/core/licenses/lucene-spatial3d-7.0.0.jar.sha1 b/core/licenses/lucene-spatial3d-7.0.0.jar.sha1 deleted file mode 100644 index 1cd5d487210f8..0000000000000 --- a/core/licenses/lucene-spatial3d-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5a795ee7914e396236ba04b0b00839e1fd2038c4 \ No newline at end of file diff --git a/core/licenses/lucene-spatial3d-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-spatial3d-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..a3be4c237dd8e --- /dev/null +++ b/core/licenses/lucene-spatial3d-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +1d1ada8fbb1b2bbbc88e9f29e28802a7b44a6665 \ No newline at end of file diff --git a/core/licenses/lucene-suggest-7.0.0.jar.sha1 b/core/licenses/lucene-suggest-7.0.0.jar.sha1 deleted file mode 100644 index 19d2dbc4339e9..0000000000000 --- a/core/licenses/lucene-suggest-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-1a17d34de6ab52050a3f33361d4ead96eb0f344c \ No newline at end of file diff --git a/core/licenses/lucene-suggest-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/core/licenses/lucene-suggest-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..e7534fc3cf7c0 --- /dev/null +++ b/core/licenses/lucene-suggest-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +fb7f18e6a81899e3ac95760b56bea21ebf143cf9 \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index b75122a9488a4..30d6c142100d6 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -118,10 +118,10 @@ public class Version implements Comparable { new Version(V_6_0_0_rc2_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); public static final int V_6_1_0_ID = 6010099; public static final Version V_6_1_0 = - new Version(V_6_1_0_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); + new Version(V_6_1_0_ID, org.apache.lucene.util.Version.LUCENE_7_1_0); public static final int V_7_0_0_alpha1_ID = 7000001; public static final Version V_7_0_0_alpha1 = - new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); + new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_1_0); public static final Version CURRENT = V_7_0_0_alpha1; // unreleased versions must be added to the above list with the suffix _UNRELEASED (with the exception of CURRENT) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java index 7944d969ab57f..e8c797e45a09c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java @@ -20,13 +20,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import org.elasticsearch.cluster.SnapshotsInProgress.State; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.snapshots.Snapshot; import java.io.IOException; @@ -160,15 +159,7 @@ public static SnapshotStatus readSnapshotStatus(StreamInput in) throws IOExcepti @Override public String toString() { - try { - XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); - builder.startObject(); - toXContent(builder, EMPTY_PARAMS); - builder.endObject(); - return builder.string(); - } catch (IOException e) { - return "{ \"error\" : \"" + e.getMessage() + "\"}"; - } + return Strings.toString(this, true, false); } /** diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java index af92c8d0a21bc..3109fa4d405ac 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java @@ -83,14 +83,8 @@ protected void masterOperation(GetSettingsRequest request, ClusterState state, A if (request.humanReadable()) { settings = IndexMetaData.addHumanReadableSettings(settings); } - if (!CollectionUtils.isEmpty(request.names())) { - Settings.Builder settingsBuilder = Settings.builder(); - for (Map.Entry entry : settings.getAsMap().entrySet()) { - if (Regex.simpleMatch(request.names(), entry.getKey())) { - settingsBuilder.put(entry.getKey(), entry.getValue()); - } - } - settings = settingsBuilder.build(); + if (CollectionUtils.isEmpty(request.names()) == false) { + settings = settings.filter(k -> Regex.simpleMatch(request.names(), k)); } indexToSettingsBuilder.put(concreteIndex.getName(), settings); } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index d4fd7b609ee4d..8a4c8b0882f08 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -40,6 +40,7 @@ import org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.InternalScrollSearchRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; @@ -320,7 +321,8 @@ public void messageReceived(ScrollFreeContextRequest request, TransportChannel c channel.sendResponse(new SearchFreeContextResponse(freed)); } }); - TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_SCROLL_ACTION_NAME, SearchFreeContextResponse::new); + TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_SCROLL_ACTION_NAME, + (Supplier) SearchFreeContextResponse::new); transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, ThreadPool.Names.SAME, SearchFreeContextRequest::new, new TaskAwareTransportRequestHandler() { @Override @@ -329,7 +331,8 @@ public void messageReceived(SearchFreeContextRequest request, TransportChannel c channel.sendResponse(new SearchFreeContextResponse(freed)); } }); - TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_ACTION_NAME, SearchFreeContextResponse::new); + TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_ACTION_NAME, + (Supplier) SearchFreeContextResponse::new); transportService.registerRequestHandler(CLEAR_SCROLL_CONTEXTS_ACTION_NAME, () -> TransportRequest.Empty.INSTANCE, ThreadPool.Names.SAME, new TaskAwareTransportRequestHandler() { @Override @@ -339,7 +342,7 @@ public void messageReceived(TransportRequest.Empty request, TransportChannel cha } }); TransportActionProxy.registerProxyAction(transportService, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, - () -> TransportResponse.Empty.INSTANCE); + () -> TransportResponse.Empty.INSTANCE); transportService.registerRequestHandler(DFS_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, new TaskAwareTransportRequestHandler() { @@ -394,7 +397,8 @@ public void onFailure(Exception e) { }); } }); - TransportActionProxy.registerProxyAction(transportService, QUERY_ACTION_NAME, QuerySearchResult::new); + 
TransportActionProxy.registerProxyAction(transportService, QUERY_ACTION_NAME, + (request) -> ((ShardSearchRequest)request).numberOfShards() == 1 ? QueryFetchSearchResult::new : QuerySearchResult::new); transportService.registerRequestHandler(QUERY_ID_ACTION_NAME, ThreadPool.Names.SEARCH, QuerySearchRequest::new, new TaskAwareTransportRequestHandler() { @@ -455,7 +459,8 @@ public void messageReceived(ShardSearchTransportRequest request, TransportChanne channel.sendResponse(new CanMatchResponse(canMatch)); } }); - TransportActionProxy.registerProxyAction(transportService, QUERY_CAN_MATCH_NAME, CanMatchResponse::new); + TransportActionProxy.registerProxyAction(transportService, QUERY_CAN_MATCH_NAME, + (Supplier) CanMatchResponse::new); } public static final class CanMatchResponse extends SearchPhaseResult { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 3be0e60f7268a..06f203595b313 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -1078,9 +1078,7 @@ public static void toXContent(IndexMetaData indexMetaData, XContentBuilder build boolean binary = params.paramAsBoolean("binary", false); builder.startObject(KEY_SETTINGS); - for (Map.Entry entry : indexMetaData.getSettings().getAsMap().entrySet()) { - builder.field(entry.getKey(), entry.getValue()); - } + indexMetaData.getSettings().toXContent(builder, new MapParams(Collections.singletonMap("flat_settings", "true"))); builder.endObject(); builder.startArray(KEY_MAPPINGS); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index d023c471493ab..c582f372e517a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -1000,17 +1000,13 @@ public static void toXContent(MetaData metaData, XContentBuilder builder, ToXCon if (!metaData.persistentSettings().isEmpty()) { builder.startObject("settings"); - for (Map.Entry entry : metaData.persistentSettings().getAsMap().entrySet()) { - builder.field(entry.getKey(), entry.getValue()); - } + metaData.persistentSettings().toXContent(builder, new MapParams(Collections.singletonMap("flat_settings", "true"))); builder.endObject(); } if (context == XContentContext.API && !metaData.transientSettings().isEmpty()) { builder.startObject("transient_settings"); - for (Map.Entry entry : metaData.transientSettings().getAsMap().entrySet()) { - builder.field(entry.getKey(), entry.getValue()); - } + metaData.transientSettings().toXContent(builder, new MapParams(Collections.singletonMap("flat_settings", "true"))); builder.endObject(); } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 39456f3d0c63a..abc0a4e8ea2de 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -165,7 +165,7 @@ public void updateSettings(final UpdateSettingsClusterStateUpdateRequest request indexScopedSettings.validate(normalizedSettings); // never allow to change the number of shards - for (String key : normalizedSettings.getKeys()) { 
+ for (String key : normalizedSettings.keySet()) { Setting setting = indexScopedSettings.get(key); assert setting != null; // we already validated the normalized settings settingsForClosedIndices.copy(key, normalizedSettings); @@ -211,8 +211,7 @@ public ClusterState execute(ClusterState currentState) { if (!skippedSettings.isEmpty() && !openIndices.isEmpty()) { throw new IllegalArgumentException(String.format(Locale.ROOT, - "Can't update non dynamic settings [%s] for open indices %s", skippedSettings, openIndices - )); + "Can't update non dynamic settings [%s] for open indices %s", skippedSettings, openIndices)); } int updatedNumberOfReplicas = openSettings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, -1); diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeFilters.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeFilters.java index 4c3910ff5ea1c..6b15d1f24581d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeFilters.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeFilters.java @@ -30,7 +30,6 @@ import java.util.HashMap; import java.util.Map; import java.util.function.BiConsumer; -import java.util.function.Consumer; public class DiscoveryNodeFilters { @@ -56,10 +55,6 @@ public enum OpType { } }; - public static DiscoveryNodeFilters buildFromSettings(OpType opType, String prefix, Settings settings) { - return buildFromKeyValue(opType, settings.getByPrefix(prefix).getAsMap()); - } - public static DiscoveryNodeFilters buildFromKeyValue(OpType opType, Map filters) { Map bFilters = new HashMap<>(); for (Map.Entry entry : filters.entrySet()) { diff --git a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 1d6fdf9cd2df2..d8f2ebe9be843 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -37,7 +37,7 @@ private ESLoggerFactory() { public static final Setting LOG_DEFAULT_LEVEL_SETTING = new Setting<>("logger.level", Level.INFO.name(), Level::valueOf, Property.NodeScope); - public static final Setting LOG_LEVEL_SETTING = + public static final Setting.AffixSetting LOG_LEVEL_SETTING = Setting.prefixKeySetting("logger.", (key) -> new Setting<>(key, Level.INFO.name(), Level::valueOf, Property.Dynamic, Property.NodeScope)); diff --git a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index 0024d83b80506..b97fc13e73038 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -52,7 +52,6 @@ import java.util.ArrayList; import java.util.EnumSet; import java.util.List; -import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; @@ -182,15 +181,12 @@ private static void configureLoggerLevels(final Settings settings) { final Level level = ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.get(settings); Loggers.setLevel(ESLoggerFactory.getRootLogger(), level); } - - final Map levels = settings.filter(ESLoggerFactory.LOG_LEVEL_SETTING::match).getAsMap(); - for (final String key : levels.keySet()) { + ESLoggerFactory.LOG_LEVEL_SETTING.getAllConcreteSettings(settings) // do not set a log level for a logger named level (from the default log 
setting) - if (!key.equals(ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.getKey())) { - final Level level = ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings); - Loggers.setLevel(ESLoggerFactory.getLogger(key.substring("logger.".length())), level); - } - } + .filter(s -> s.getKey().equals(ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.getKey()) == false).forEach(s -> { + final Level level = s.get(settings); + Loggers.setLevel(ESLoggerFactory.getLogger(s.getKey().substring("logger.".length())), level); + }); } /** diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index b156c9bb2961e..597fa970a57ae 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -40,21 +40,17 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; -import org.apache.lucene.search.Collector; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; -import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; -import org.apache.lucene.search.TimeLimitingCollector; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TwoPhaseIterator; @@ -66,9 +62,7 @@ import org.apache.lucene.store.Lock; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.Counter; import org.apache.lucene.util.Version; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -87,7 +81,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Objects; public class Lucene { public static final String LATEST_DOC_VALUES_FORMAT = "Lucene70"; @@ -769,7 +762,7 @@ public static Bits asSequentialAccessBits(final int maxDoc, @Nullable ScorerSupp return new Bits.MatchNoBits(maxDoc); } // Since we want bits, we need random-access - final Scorer scorer = scorerSupplier.get(true); // this never returns null + final Scorer scorer = scorerSupplier.get(Long.MAX_VALUE); // this never returns null final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator(); final DocIdSetIterator iterator; if (twoPhase == null) { diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index a8356bfe10f82..8cb13647fb6af 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -63,7 +63,6 @@ public final class NetworkModule { public static final String TRANSPORT_TYPE_KEY = "transport.type"; public static final String HTTP_TYPE_KEY = "http.type"; - public static final String LOCAL_TRANSPORT = "local"; public static final String HTTP_TYPE_DEFAULT_KEY = 
"http.type.default"; public static final String TRANSPORT_TYPE_DEFAULT_KEY = "transport.type.default"; diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 0f0d1906bf82d..61f32c67c20cb 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -86,7 +86,7 @@ protected AbstractScopedSettings(Settings settings, Set> settingsSet, protected void validateSettingKey(Setting setting) { if (isValidKey(setting.getKey()) == false && (setting.isGroupSetting() && isValidGroupKey(setting.getKey()) - || isValidAffixKey(setting.getKey())) == false) { + || isValidAffixKey(setting.getKey())) == false || setting.getKey().endsWith(".0")) { throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]"); } } @@ -503,24 +503,25 @@ private boolean updateSettings(Settings toApply, Settings.Builder target, Settin (onlyDynamic && isDynamicSetting(key) // it's a dynamicSetting and we only do dynamic settings || get(key) == null && key.startsWith(ARCHIVED_SETTINGS_PREFIX) // the setting is not registered AND it's been archived || (onlyDynamic == false && get(key) != null))); // if it's not dynamic AND we have a key - for (Map.Entry entry : toApply.getAsMap().entrySet()) { - if (entry.getValue() == null && (canRemove.test(entry.getKey()) || entry.getKey().endsWith("*"))) { + for (String key : toApply.keySet()) { + boolean isNull = toApply.get(key) == null; + if (isNull && (canRemove.test(key) || key.endsWith("*"))) { // this either accepts null values that suffice the canUpdate test OR wildcard expressions (key ends with *) // we don't validate if there is any dynamic setting with that prefix yet we could do in the future - toRemove.add(entry.getKey()); + toRemove.add(key); // we don't set changed here it's set after we apply deletes below if something actually changed - } else if (get(entry.getKey()) == null) { - throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not recognized"); - } else if (entry.getValue() != null && canUpdate.test(entry.getKey())) { - validate(entry.getKey(), toApply); - settingsBuilder.put(entry.getKey(), entry.getValue()); - updates.put(entry.getKey(), entry.getValue()); + } else if (get(key) == null) { + throw new IllegalArgumentException(type + " setting [" + key + "], not recognized"); + } else if (isNull == false && canUpdate.test(key)) { + validate(key, toApply); + settingsBuilder.copy(key, toApply); + updates.copy(key, toApply); changed = true; } else { - if (isFinalSetting(entry.getKey())) { - throw new IllegalArgumentException("final " + type + " setting [" + entry.getKey() + "], not updateable"); + if (isFinalSetting(key)) { + throw new IllegalArgumentException("final " + type + " setting [" + key + "], not updateable"); } else { - throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable"); + throw new IllegalArgumentException(type + " setting [" + key + "], not dynamically updateable"); } } } @@ -533,7 +534,7 @@ private static boolean applyDeletes(Set deletes, Settings.Builder builde boolean changed = false; for (String entry : deletes) { Set keysToRemove = new HashSet<>(); - Set keySet = builder.internalMap().keySet(); + Set keySet = builder.keys(); for (String key : keySet) { if (Regex.simpleMatch(entry, key) && canRemove.test(key)) { 
// we have to re-check with canRemove here since we might have a wildcard expression foo.* that matches @@ -584,35 +585,35 @@ public Settings archiveUnknownOrInvalidSettings( final BiConsumer, IllegalArgumentException> invalidConsumer) { Settings.Builder builder = Settings.builder(); boolean changed = false; - for (Map.Entry entry : settings.getAsMap().entrySet()) { + for (String key : settings.keySet()) { try { - Setting setting = get(entry.getKey()); + Setting setting = get(key); if (setting != null) { setting.get(settings); - builder.put(entry.getKey(), entry.getValue()); + builder.copy(key, settings); } else { - if (entry.getKey().startsWith(ARCHIVED_SETTINGS_PREFIX) || isPrivateSetting(entry.getKey())) { - builder.put(entry.getKey(), entry.getValue()); + if (key.startsWith(ARCHIVED_SETTINGS_PREFIX) || isPrivateSetting(key)) { + builder.copy(key, settings); } else { changed = true; - unknownConsumer.accept(entry); + unknownConsumer.accept(new Entry(key, settings)); /* * We put them back in here such that tools can check from the outside if there are any indices with invalid * settings. The setting can remain there but we want users to be aware that some of their setting are invalid and * they can research why and what they need to do to replace them. */ - builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue()); + builder.copy(ARCHIVED_SETTINGS_PREFIX + key, key, settings); } } } catch (IllegalArgumentException ex) { changed = true; - invalidConsumer.accept(entry, ex); + invalidConsumer.accept(new Entry(key, settings), ex); /* * We put them back in here such that tools can check from the outside if there are any indices with invalid settings. The * setting can remain there but we want users to be aware that some of their setting are invalid and they can research why * and what they need to do to replace them. */ - builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue()); + builder.copy(ARCHIVED_SETTINGS_PREFIX + key, key, settings); } } if (changed) { @@ -622,6 +623,32 @@ public Settings archiveUnknownOrInvalidSettings( } } + private static final class Entry implements Map.Entry { + + private final String key; + private final Settings settings; + + private Entry(String key, Settings settings) { + this.key = key; + this.settings = settings; + } + + @Override + public String getKey() { + return key; + } + + @Override + public String getValue() { + return settings.get(key); + } + + @Override + public String setValue(String value) { + throw new UnsupportedOperationException(); + } + } + /** * Returns true iff the setting is a private setting ie. it should be treated as valid even though it has no internal * representation. Otherwise false diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 4704840d7b5f6..1ade10e4c7dd1 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -119,15 +119,15 @@ private static final class LoggingSettingUpdater implements SettingUpdatertrue iff this setting is present in the given settings object. 
Otherwise false */ public boolean exists(Settings settings) { - return settings.getAsMap().containsKey(getKey()); + return settings.keySet().contains(getKey()); } /** @@ -529,7 +530,7 @@ boolean isGroupSetting() { } private Stream matchStream(Settings settings) { - return settings.getAsMap().keySet().stream().filter((key) -> match(key)).map(settingKey -> key.getConcreteString(settingKey)); + return settings.keySet().stream().filter((key) -> match(key)).map(settingKey -> key.getConcreteString(settingKey)); } AbstractScopedSettings.SettingUpdater, T>> newAffixUpdater( @@ -736,8 +737,8 @@ public Settings get(Settings settings) { @Override public boolean exists(Settings settings) { - for (Map.Entry entry : settings.getAsMap().entrySet()) { - if (entry.getKey().startsWith(key)) { + for (String settingsKey : settings.keySet()) { + if (settingsKey.startsWith(key)) { return true; } } @@ -746,13 +747,11 @@ public boolean exists(Settings settings) { @Override public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) { - Map leftGroup = get(source).getAsMap(); + Set leftGroup = get(source).keySet(); Settings defaultGroup = get(defaultSettings); - for (Map.Entry entry : defaultGroup.getAsMap().entrySet()) { - if (leftGroup.containsKey(entry.getKey()) == false) { - builder.put(getKey() + entry.getKey(), entry.getValue()); - } - } + + builder.put(Settings.builder().put(defaultGroup.filter(k -> leftGroup.contains(k) == false), false) + .normalizePrefix(getKey()).build(), false); } @Override @@ -779,7 +778,7 @@ public Settings getValue(Settings current, Settings previous) { validator.accept(currentSettings); } catch (Exception | AssertionError e) { throw new IllegalArgumentException("illegal value can't update [" + key + "] from [" - + previousSettings.getAsMap() + "] to [" + currentSettings.getAsMap() + "]", e); + + previousSettings + "] to [" + currentSettings+ "]", e); } return currentSettings; } @@ -821,12 +820,6 @@ boolean hasComplexMatcher() { return true; } - @Override - public boolean exists(Settings settings) { - boolean exists = super.exists(settings); - return exists || settings.get(getKey() + ".0") != null; - } - @Override public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) { if (exists(source) == false) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index 04b10d7003848..a1adef3338662 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -47,6 +47,7 @@ import java.io.IOException; import java.io.InputStream; +import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; import java.security.GeneralSecurityException; @@ -55,23 +56,18 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.Dictionary; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.NoSuchElementException; -import java.util.Objects; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.UnaryOperator; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -85,10 
+81,9 @@ public final class Settings implements ToXContentFragment { public static final Settings EMPTY = new Builder().build(); - private static final Pattern ARRAY_PATTERN = Pattern.compile("(.*)\\.\\d+$"); /** The raw settings from the full key to raw string value. */ - private final Map settings; + private final Map settings; /** The secure settings storage associated with these settings. */ private final SecureSettings secureSettings; @@ -102,7 +97,7 @@ public final class Settings implements ToXContentFragment { */ private final SetOnce> keys = new SetOnce<>(); - Settings(Map settings, SecureSettings secureSettings) { + Settings(Map settings, SecureSettings secureSettings) { // we use a sorted map for consistent serialization when using getAsMap() this.settings = Collections.unmodifiableSortedMap(new TreeMap<>(settings)); this.secureSettings = secureSettings; @@ -116,18 +111,9 @@ SecureSettings getSecureSettings() { return secureSettings; } - /** - * The settings as a flat {@link java.util.Map}. - * @return an unmodifiable map of settings - */ - public Map getAsMap() { - // settings is always unmodifiable - return this.settings; - } - private Map getAsStructuredMap() { Map map = new HashMap<>(2); - for (Map.Entry entry : settings.entrySet()) { + for (Map.Entry entry : settings.entrySet()) { processSetting(map, "", entry.getKey(), entry.getValue()); } for (Map.Entry entry : map.entrySet()) { @@ -140,7 +126,7 @@ private Map getAsStructuredMap() { return map; } - private void processSetting(Map map, String prefix, String setting, String value) { + private void processSetting(Map map, String prefix, String setting, Object value) { int prefixLength = setting.indexOf('.'); if (prefixLength == -1) { @SuppressWarnings("unchecked") Map innerMap = (Map) map.get(prefix + setting); @@ -244,7 +230,7 @@ public Settings getAsSettings(String setting) { * @return The setting value, null if it does not exists. */ public String get(String setting) { - return settings.get(setting); + return toString(settings.get(setting)); } /** @@ -320,13 +306,6 @@ public Long getAsLong(String setting, Long defaultValue) { } } - /** - * Returns a set of all keys in this settings object - */ - public Set getKeys() { - return Collections.unmodifiableSet(settings.keySet()); - } - /** * We have to lazy initialize the deprecation logger as otherwise a static logger here would be constructed before logging is configured * leading to a runtime failure (see {@link LogConfigurator#checkErrorListener()} ). The premature construction would come from any @@ -387,83 +366,60 @@ public SizeValue getAsSize(String setting, SizeValue defaultValue) throws Settin } /** - * The values associated with a setting prefix as an array. The settings array is in the format of: - * settingPrefix.[index]. + * The values associated with a setting key as an array. *
<p>
* It will also automatically load a comma separated list under the settingPrefix and merge with * the numbered format. * - * @param settingPrefix The setting prefix to load the array by + * @param key The setting prefix to load the array by * @return The setting array values */ - public String[] getAsArray(String settingPrefix) throws SettingsException { - return getAsArray(settingPrefix, Strings.EMPTY_ARRAY, true); + public String[] getAsArray(String key) throws SettingsException { + return getAsArray(key, Strings.EMPTY_ARRAY, true); } /** - * The values associated with a setting prefix as an array. The settings array is in the format of: - * settingPrefix.[index]. + * The values associated with a setting key as an array. *
<p>
* If commaDelimited is true, it will automatically load a comma separated list under the settingPrefix and merge with * the numbered format. * - * @param settingPrefix The setting prefix to load the array by + * @param key The setting key to load the array by * @return The setting array values */ - public String[] getAsArray(String settingPrefix, String[] defaultArray) throws SettingsException { - return getAsArray(settingPrefix, defaultArray, true); + public String[] getAsArray(String key, String[] defaultArray) throws SettingsException { + return getAsArray(key, defaultArray, true); } /** - * The values associated with a setting prefix as an array. The settings array is in the format of: - * settingPrefix.[index]. + * The values associated with a setting key as an array. *
<p>
* It will also automatically load a comma separated list under the settingPrefix and merge with * the numbered format. * - * @param settingPrefix The setting prefix to load the array by + * @param key The setting key to load the array by * @param defaultArray The default array to use if no value is specified * @param commaDelimited Whether to try to parse a string as a comma-delimited value * @return The setting array values */ - public String[] getAsArray(String settingPrefix, String[] defaultArray, Boolean commaDelimited) throws SettingsException { + public String[] getAsArray(String key, String[] defaultArray, Boolean commaDelimited) throws SettingsException { List result = new ArrayList<>(); - - final String valueFromPrefix = get(settingPrefix); - final String valueFromPreifx0 = get(settingPrefix + ".0"); - - if (valueFromPrefix != null && valueFromPreifx0 != null) { - final String message = String.format( - Locale.ROOT, - "settings object contains values for [%s=%s] and [%s=%s]", - settingPrefix, - valueFromPrefix, - settingPrefix + ".0", - valueFromPreifx0); - throw new IllegalStateException(message); - } - - if (get(settingPrefix) != null) { - if (commaDelimited) { - String[] strings = Strings.splitStringByCommaToArray(get(settingPrefix)); + final Object valueFromPrefix = settings.get(key); + if (valueFromPrefix != null) { + if (valueFromPrefix instanceof List) { + result = ((List) valueFromPrefix); + } else if (commaDelimited) { + String[] strings = Strings.splitStringByCommaToArray(get(key)); if (strings.length > 0) { for (String string : strings) { result.add(string.trim()); } } } else { - result.add(get(settingPrefix).trim()); + result.add(get(key).trim()); } } - int counter = 0; - while (true) { - String value = get(settingPrefix + '.' + (counter++)); - if (value == null) { - break; - } - result.add(value.trim()); - } if (result.isEmpty()) { return defaultArray; } @@ -564,7 +520,7 @@ public Set names() { */ public String toDelimitedString(char delimiter) { StringBuilder sb = new StringBuilder(); - for (Map.Entry entry : settings.entrySet()) { + for (Map.Entry entry : settings.entrySet()) { sb.append(entry.getKey()).append("=").append(entry.getValue()).append(delimiter); } return sb.toString(); @@ -589,19 +545,52 @@ public int hashCode() { public static Settings readSettingsFromStream(StreamInput in) throws IOException { Builder builder = new Builder(); int numberOfSettings = in.readVInt(); - for (int i = 0; i < numberOfSettings; i++) { - builder.put(in.readString(), in.readOptionalString()); + if (in.getVersion().onOrAfter(Version.V_6_1_0)) { + for (int i = 0; i < numberOfSettings; i++) { + String key = in.readString(); + Object value = in.readGenericValue(); + if (value == null) { + builder.putNull(key); + } else if (value instanceof List) { + builder.putArray(key, (List) value); + } else { + builder.put(key, value.toString()); + } + } + } else { + for (int i = 0; i < numberOfSettings; i++) { + String key = in.readString(); + String value = in.readOptionalString(); + builder.put(key, value); + } } return builder.build(); } public static void writeSettingsToStream(Settings settings, StreamOutput out) throws IOException { - // pull getAsMap() to exclude secure settings in size() - Set> entries = settings.getAsMap().entrySet(); - out.writeVInt(entries.size()); - for (Map.Entry entry : entries) { - out.writeString(entry.getKey()); - out.writeOptionalString(entry.getValue()); + // pull settings to exclude secure settings in size() + Set> entries = settings.settings.entrySet(); + if 
(out.getVersion().onOrAfter(Version.V_6_1_0)) { + out.writeVInt(entries.size()); + for (Map.Entry entry : entries) { + out.writeString(entry.getKey()); + out.writeGenericValue(entry.getValue()); + } + } else { + int size = entries.stream().mapToInt(e -> e.getValue() instanceof List ? ((List)e.getValue()).size() : 1).sum(); + out.writeVInt(size); + for (Map.Entry entry : entries) { + if (entry.getValue() instanceof List) { + int idx = 0; + for (String value : (List)entry.getValue()) { + out.writeString(entry.getKey() + "." + idx++); + out.writeOptionalString(value); + } + } else { + out.writeString(entry.getKey()); + out.writeOptionalString(toString(entry.getValue())); + } + } } } @@ -620,7 +609,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(entry.getKey(), entry.getValue()); } } else { - for (Map.Entry entry : settings.getAsMap().entrySet()) { + for (Map.Entry entry : settings.settings.entrySet()) { builder.field(entry.getKey(), entry.getValue()); } } @@ -636,9 +625,7 @@ public static Settings fromXContent(XContentParser parser) throws IOException { return fromXContent(parser, true, false); } - private static Settings fromXContent(XContentParser parser, boolean allowNullValues, - boolean validateEndOfStream) - throws IOException { + private static Settings fromXContent(XContentParser parser, boolean allowNullValues, boolean validateEndOfStream) throws IOException { if (parser.currentToken() == null) { parser.nextToken(); } @@ -780,7 +767,7 @@ public static class Builder { public static final Settings EMPTY_SETTINGS = new Builder().build(); // we use a sorted map for consistent serialization when using getAsMap() - private final Map map = new TreeMap<>(); + private final Map map = new TreeMap<>(); private SetOnce secureSettings = new SetOnce<>(); @@ -788,22 +775,22 @@ private Builder() { } - public Map internalMap() { - return this.map; + public Set keys() { + return this.map.keySet(); } /** * Removes the provided setting from the internal map holding the current list of settings. */ public String remove(String key) { - return map.remove(key); + return Settings.toString(map.remove(key)); } /** * Returns a setting value based on the setting key. */ public String get(String key) { - return map.get(key); + return Settings.toString(map.get(key)); } /** Return the current secure settings, or {@code null} if none have been set. */ @@ -902,7 +889,21 @@ public Builder put(String key, String value) { } public Builder copy(String key, Settings source) { - return put(key, source.get(key)); + return copy(key, key, source); + } + + public Builder copy(String key, String sourceKey, Settings source) { + if (source.settings.containsKey(sourceKey) == false) { + throw new IllegalArgumentException("source key not found in the source settings"); + } + final Object value = source.settings.get(sourceKey); + if (value instanceof List) { + return putArray(key, (List)value); + } else if (value == null) { + return putNull(key); + } else { + return put(key, Settings.toString(value)); + } } /** @@ -1034,16 +1035,7 @@ public Builder putArray(String setting, String... values) { */ public Builder putArray(String setting, List values) { remove(setting); - int counter = 0; - while (true) { - String value = map.remove(setting + '.' + (counter++)); - if (value == null) { - break; - } - } - for (int i = 0; i < values.size(); i++) { - put(setting + "." 
+ i, values.get(i)); - } + map.put(setting, Collections.unmodifiableList(new ArrayList<>(values))); return this; } @@ -1076,55 +1068,41 @@ public Builder put(Settings settings) { * @param copySecureSettings if true all settings including secure settings are copied. */ public Builder put(Settings settings, boolean copySecureSettings) { - removeNonArraysFieldsIfNewSettingsContainsFieldAsArray(settings.getAsMap()); - map.putAll(settings.getAsMap()); + Map settingsMap = new HashMap<>(settings.settings); + processLegacyLists(settingsMap); + map.putAll(settingsMap); if (copySecureSettings && settings.getSecureSettings() != null) { setSecureSettings(settings.getSecureSettings()); } return this; } - /** - * Removes non array values from the existing map, if settings contains an array value instead - * - * Example: - * Existing map contains: {key:value} - * New map contains: {key:[value1,value2]} (which has been flattened to {}key.0:value1,key.1:value2}) - * - * This ensure that that the 'key' field gets removed from the map in order to override all the - * data instead of merging - */ - private void removeNonArraysFieldsIfNewSettingsContainsFieldAsArray(Map settings) { - List prefixesToRemove = new ArrayList<>(); - for (final Map.Entry entry : settings.entrySet()) { - final Matcher matcher = ARRAY_PATTERN.matcher(entry.getKey()); - if (matcher.matches()) { - prefixesToRemove.add(matcher.group(1)); - } else if (map.keySet().stream().anyMatch(key -> key.startsWith(entry.getKey() + "."))) { - prefixesToRemove.add(entry.getKey()); - } - } - for (String prefix : prefixesToRemove) { - Iterator> iterator = map.entrySet().iterator(); - while (iterator.hasNext()) { - Map.Entry entry = iterator.next(); - if (entry.getKey().startsWith(prefix + ".") || entry.getKey().equals(prefix)) { - iterator.remove(); + private void processLegacyLists(Map map) { + String[] array = map.keySet().toArray(new String[map.size()]); + for (String key : array) { + if (key.endsWith(".0")) { // let's only look at the head of the list and convert in order starting there. + int counter = 0; + String prefix = key.substring(0, key.lastIndexOf('.')); + if (map.containsKey(prefix)) { + throw new IllegalStateException("settings builder can't contain values for [" + prefix + "=" + map.get(prefix) + + "] and [" + key + "=" + map.get(key) + "]"); + } + List values = new ArrayList<>(); + while (true) { + String listKey = prefix + '.' + (counter++); + String value = get(listKey); + if (value == null) { + map.put(prefix, values); + break; + } else { + values.add(value); + map.remove(listKey); + } } } } } - /** - * Sets all the provided settings.
- */ - public Builder put(Dictionary properties) { - for (Object key : Collections.list(properties.keys())) { - map.put(Objects.toString(key), Objects.toString(properties.get(key))); - } - return this; - } - /** * Loads settings from the actual string content that represents them using {@link #fromXContent(XContentParser)} */ @@ -1202,7 +1180,7 @@ public String resolvePlaceholder(String placeholderName) { if (value != null) { return value; } - return map.get(placeholderName); + return Settings.toString(map.get(placeholderName)); } @Override @@ -1222,14 +1200,14 @@ public boolean shouldRemoveMissingPlaceholder(String placeholderName) { } }; - Iterator> entryItr = map.entrySet().iterator(); + Iterator> entryItr = map.entrySet().iterator(); while (entryItr.hasNext()) { - Map.Entry entry = entryItr.next(); - if (entry.getValue() == null) { + Map.Entry entry = entryItr.next(); + if (entry.getValue() == null || entry.getValue() instanceof List) { // a null value obviously can't be replaced continue; } - String value = propertyPlaceholder.replacePlaceholders(entry.getValue(), placeholderResolver); + String value = propertyPlaceholder.replacePlaceholders(Settings.toString(entry.getValue()), placeholderResolver); // if the values exists and has length, we should maintain it in the map // otherwise, the replace process resolved into removing it if (Strings.hasLength(value)) { @@ -1247,10 +1225,10 @@ public boolean shouldRemoveMissingPlaceholder(String placeholderName) { * If a setting doesn't start with the prefix, the builder appends the prefix to such setting. */ public Builder normalizePrefix(String prefix) { - Map replacements = new HashMap<>(); - Iterator> iterator = map.entrySet().iterator(); + Map replacements = new HashMap<>(); + Iterator> iterator = map.entrySet().iterator(); while(iterator.hasNext()) { - Map.Entry entry = iterator.next(); + Map.Entry entry = iterator.next(); if (entry.getKey().startsWith(prefix) == false) { replacements.put(prefix + entry.getKey(), entry.getValue()); iterator.remove(); @@ -1265,30 +1243,31 @@ public Builder normalizePrefix(String prefix) { * set on this builder.
*/ public Settings build() { + processLegacyLists(map); return new Settings(map, secureSettings.get()); } } // TODO We could use an FST internally to make things even faster and more compact - private static final class FilteredMap extends AbstractMap { - private final Map delegate; + private static final class FilteredMap extends AbstractMap { + private final Map delegate; private final Predicate filter; private final String prefix; // we cache that size since we have to iterate the entire set // this is safe to do since this map is only used with unmodifiable maps private int size = -1; @Override - public Set> entrySet() { - Set> delegateSet = delegate.entrySet(); - AbstractSet> filterSet = new AbstractSet>() { + public Set> entrySet() { + Set> delegateSet = delegate.entrySet(); + AbstractSet> filterSet = new AbstractSet>() { @Override - public Iterator> iterator() { - Iterator> iter = delegateSet.iterator(); + public Iterator> iterator() { + Iterator> iter = delegateSet.iterator(); - return new Iterator>() { + return new Iterator>() { private int numIterated; - private Entry currentElement; + private Entry currentElement; @Override public boolean hasNext() { if (currentElement != null) { @@ -1311,29 +1290,29 @@ public boolean hasNext() { } @Override - public Entry next() { + public Entry next() { if (currentElement == null && hasNext() == false) { // protect against no #hasNext call or not respecting it throw new NoSuchElementException("make sure to call hasNext first"); } - final Entry current = this.currentElement; + final Entry current = this.currentElement; this.currentElement = null; if (prefix == null) { return current; } - return new Entry() { + return new Entry() { @Override public String getKey() { return current.getKey().substring(prefix.length()); } @Override - public String getValue() { + public Object getValue() { return current.getValue(); } @Override - public String setValue(String value) { + public Object setValue(Object value) { throw new UnsupportedOperationException(); } }; @@ -1349,14 +1328,14 @@ public int size() { return filterSet; } - private FilteredMap(Map delegate, Predicate filter, String prefix) { + private FilteredMap(Map delegate, Predicate filter, String prefix) { this.delegate = delegate; this.filter = filter; this.prefix = prefix; } @Override - public String get(Object key) { + public Object get(Object key) { if (key instanceof String) { final String theKey = prefix == null ? (String)key : prefix + key; if (filter.test(theKey)) { @@ -1432,4 +1411,21 @@ public void close() throws IOException { delegate.close(); } } + + @Override + public String toString() { + try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { + builder.startObject(); + toXContent(builder, new MapParams(Collections.singletonMap("flat_settings", "true"))); + builder.endObject(); + return builder.string(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private static String toString(Object o) { + return o == null ? 
null : o.toString(); + } + } diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java index 32c5e7a0da318..1c67318e28286 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java @@ -30,8 +30,6 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; -import java.util.Map; -import java.util.Map.Entry; import java.util.Set; /** @@ -107,10 +105,10 @@ private static Settings filterSettings(Iterable patterns, Settings setti } if (!simpleMatchPatternList.isEmpty()) { String[] simpleMatchPatterns = simpleMatchPatternList.toArray(new String[simpleMatchPatternList.size()]); - Iterator> iterator = builder.internalMap().entrySet().iterator(); + Iterator iterator = builder.keys().iterator(); while (iterator.hasNext()) { - Map.Entry current = iterator.next(); - if (Regex.simpleMatch(simpleMatchPatterns, current.getKey())) { + String key = iterator.next(); + if (Regex.simpleMatch(simpleMatchPatterns, key)) { iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index 29395088b3ca3..08d0e6c0ccecf 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -506,7 +506,8 @@ public synchronized boolean updateIndexMetaData(IndexMetaData indexMetaData) { } this.indexMetaData = indexMetaData; final Settings existingSettings = this.settings; - if (existingSettings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE).getAsMap().equals(newSettings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE).getAsMap())) { + if (existingSettings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE) + .equals(newSettings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE))) { // nothing to update, same settings return false; } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java index f53b245e14c36..c6f1bfe7f41d1 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -23,6 +23,7 @@ import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ar.ArabicAnalyzer; import org.apache.lucene.analysis.bg.BulgarianAnalyzer; +import org.apache.lucene.analysis.bn.BengaliAnalyzer; import org.apache.lucene.analysis.br.BrazilianAnalyzer; import org.apache.lucene.analysis.ca.CatalanAnalyzer; import org.apache.lucene.analysis.ckb.SoraniAnalyzer; @@ -101,13 +102,8 @@ public static boolean isNoStopwords(Settings settings) { public static CharArraySet parseStemExclusion(Settings settings, CharArraySet defaultStemExclusion) { String value = settings.get("stem_exclusion"); - if (value != null) { - if ("_none_".equals(value)) { - return CharArraySet.EMPTY_SET; - } else { - // LUCENE 4 UPGRADE: Should be settings.getAsBoolean("stem_exclusion_case", false)? 
- return new CharArraySet(Strings.commaDelimitedListToSet(value), false); - } + if ("_none_".equals(value)) { + return CharArraySet.EMPTY_SET; } String[] stemExclusion = settings.getAsArray("stem_exclusion", null); if (stemExclusion != null) { @@ -124,6 +120,7 @@ public static CharArraySet parseStemExclusion(Settings settings, CharArraySet de namedStopWords.put("_arabic_", ArabicAnalyzer.getDefaultStopSet()); namedStopWords.put("_armenian_", ArmenianAnalyzer.getDefaultStopSet()); namedStopWords.put("_basque_", BasqueAnalyzer.getDefaultStopSet()); + namedStopWords.put("_bengali_", BengaliAnalyzer.getDefaultStopSet()); namedStopWords.put("_brazilian_", BrazilianAnalyzer.getDefaultStopSet()); namedStopWords.put("_bulgarian_", BulgarianAnalyzer.getDefaultStopSet()); namedStopWords.put("_catalan_", CatalanAnalyzer.getDefaultStopSet()); @@ -164,7 +161,7 @@ public static CharArraySet parseWords(Environment env, Settings settings, String if ("_none_".equals(value)) { return CharArraySet.EMPTY_SET; } else { - return resolveNamedWords(Strings.commaDelimitedListToSet(value), namedWords, ignoreCase); + return resolveNamedWords(Arrays.asList(settings.getAsArray(name)), namedWords, ignoreCase); } } List pathLoadedWords = getWordList(env, settings, name); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/BengaliAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/BengaliAnalyzerProvider.java new file mode 100644 index 0000000000000..41931833301cf --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/analysis/BengaliAnalyzerProvider.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.bn.BengaliAnalyzer; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; + +public class BengaliAnalyzerProvider extends AbstractIndexAnalyzerProvider { + + private final BengaliAnalyzer analyzer; + + public BengaliAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + super(indexSettings, name, settings); + analyzer = new BengaliAnalyzer( + Analysis.parseStopWords(env, settings, BengaliAnalyzer.getDefaultStopSet()), + Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET) + ); + analyzer.setVersion(version); + } + + @Override + public BengaliAnalyzer get() { + return this.analyzer; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexOrdinalsFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexOrdinalsFieldData.java index 2e714fc80a12b..8a9fabc9e1354 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexOrdinalsFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexOrdinalsFieldData.java @@ -21,7 +21,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.MultiDocValues; +import org.apache.lucene.index.OrdinalMap; /** @@ -43,8 +43,8 @@ public interface IndexOrdinalsFieldData extends IndexFieldData.Global> scriptFunction; protected GlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, AtomicOrdinalsFieldData[] segmentAfd, - MultiDocValues.OrdinalMap ordinalMap, long memorySizeInBytes, Function> scriptFunction) { super(indexSettings); this.fieldName = fieldName; @@ -113,17 +113,17 @@ public AtomicOrdinalsFieldData load(LeafReaderContext context) { } @Override - public MultiDocValues.OrdinalMap getOrdinalMap() { + public OrdinalMap getOrdinalMap() { return ordinalMap; } private final class Atomic extends AbstractAtomicOrdinalsFieldData { private final AtomicOrdinalsFieldData afd; - private final MultiDocValues.OrdinalMap ordinalMap; + private final OrdinalMap ordinalMap; private final int segmentIndex; - private Atomic(AtomicOrdinalsFieldData afd, MultiDocValues.OrdinalMap ordinalMap, int segmentIndex) { + private Atomic(AtomicOrdinalsFieldData afd, OrdinalMap ordinalMap, int segmentIndex) { super(scriptFunction); this.afd = afd; this.ordinalMap = ordinalMap; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java index 1dbd082f93bc8..d89c6d64d4915 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java @@ -22,7 +22,7 @@ import org.apache.lucene.index.FilteredTermsEnum; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.MultiDocValues; +import org.apache.lucene.index.OrdinalMap; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; @@ -53,7 +53,7 @@ protected AbstractIndexOrdinalsFieldData(IndexSettings indexSettings, String fie } @Override - public MultiDocValues.OrdinalMap getOrdinalMap() { + public 
OrdinalMap getOrdinalMap() { return null; } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java index 9e6e2e994c9d6..0834d2479f072 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java @@ -21,7 +21,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.MultiDocValues; +import org.apache.lucene.index.OrdinalMap; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedSetSelector; @@ -128,7 +128,7 @@ public IndexOrdinalsFieldData localGlobalDirect(DirectoryReader indexReader) thr } @Override - public MultiDocValues.OrdinalMap getOrdinalMap() { + public OrdinalMap getOrdinalMap() { return null; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index b439d53e6921a..6063b8a120491 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -57,6 +57,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder + * The default metric used by fuzzy queries to determine a match is the Damerau-Levenshtein + * distance formula which supports transpositions. Setting transposition to false will + * switch to classic Levenshtein distance.
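To make the transposition distinction concrete, a hedged sketch against Lucene's FuzzyQuery, which is where this flag ultimately lands; field and terms are invented:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.FuzzyQuery;

// "caht" -> "chat" is one adjacent swap. With transpositions enabled
// (Damerau-Levenshtein), maxEdits = 1 matches:
FuzzyQuery damerau = new FuzzyQuery(new Term("body", "caht"), 1, 0, 50, true);
// With classic Levenshtein the same swap costs two substitutions, so
// maxEdits = 1 no longer matches "chat":
FuzzyQuery classic = new FuzzyQuery(new Term("body", "caht"), 1, 0, 50, false);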
+ * If not set, Damerau-Levenshtein distance metric will be used. + */ + public MultiMatchQueryBuilder fuzzyTranspositions(boolean fuzzyTranspositions) { + this.fuzzyTranspositions = fuzzyTranspositions; + return this; + } + @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); @@ -573,6 +594,7 @@ public void doXContent(XContentBuilder builder, Params params) throws IOExceptio } builder.field(ZERO_TERMS_QUERY_FIELD.getPreferredName(), zeroTermsQuery.toString()); builder.field(GENERATE_SYNONYMS_PHRASE_QUERY.getPreferredName(), autoGenerateSynonymsPhraseQuery); + builder.field(FUZZY_TRANSPOSITIONS_FIELD.getPreferredName(), fuzzyTranspositions); printBoostAndQueryName(builder); builder.endObject(); } @@ -595,6 +617,7 @@ public static MultiMatchQueryBuilder fromXContent(XContentParser parser) throws boolean lenient = DEFAULT_LENIENCY; MatchQuery.ZeroTermsQuery zeroTermsQuery = DEFAULT_ZERO_TERMS_QUERY; boolean autoGenerateSynonymsPhraseQuery = true; + boolean fuzzyTranspositions = DEFAULT_FUZZY_TRANSPOSITIONS; float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; @@ -659,6 +682,8 @@ public static MultiMatchQueryBuilder fromXContent(XContentParser parser) throws queryName = parser.text(); } else if (GENERATE_SYNONYMS_PHRASE_QUERY.match(currentFieldName)) { autoGenerateSynonymsPhraseQuery = parser.booleanValue(); + } else if (FUZZY_TRANSPOSITIONS_FIELD.match(currentFieldName)) { + fuzzyTranspositions = parser.booleanValue(); } else { throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] query does not support [" + currentFieldName + "]"); @@ -700,7 +725,8 @@ public static MultiMatchQueryBuilder fromXContent(XContentParser parser) throws .zeroTermsQuery(zeroTermsQuery) .autoGenerateSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery) .boost(boost) - .queryName(queryName); + .queryName(queryName) + .fuzzyTranspositions(fuzzyTranspositions); } private static void parseFieldAndBoost(XContentParser parser, Map fieldsBoosts) throws IOException { @@ -755,6 +781,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { multiMatchQuery.setLenient(lenient); multiMatchQuery.setZeroTermsQuery(zeroTermsQuery); multiMatchQuery.setAutoGenerateSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery); + multiMatchQuery.setTranspositions(fuzzyTranspositions); if (useDisMax != null) { // backwards foobar boolean typeUsesDismax = type.tieBreaker() != 1.0f; @@ -775,7 +802,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { protected int doHashCode() { return Objects.hash(value, fieldsBoosts, type, operator, analyzer, slop, fuzziness, prefixLength, maxExpansions, minimumShouldMatch, fuzzyRewrite, useDisMax, tieBreaker, lenient, - cutoffFrequency, zeroTermsQuery, autoGenerateSynonymsPhraseQuery); + cutoffFrequency, zeroTermsQuery, autoGenerateSynonymsPhraseQuery, fuzzyTranspositions); } @Override @@ -796,6 +823,7 @@ protected boolean doEquals(MultiMatchQueryBuilder other) { Objects.equals(lenient, other.lenient) && Objects.equals(cutoffFrequency, other.cutoffFrequency) && Objects.equals(zeroTermsQuery, other.zeroTermsQuery) && - Objects.equals(autoGenerateSynonymsPhraseQuery, other.autoGenerateSynonymsPhraseQuery); + Objects.equals(autoGenerateSynonymsPhraseQuery, other.autoGenerateSynonymsPhraseQuery) && + Objects.equals(fuzzyTranspositions, other.fuzzyTranspositions); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java 
b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 7b6f591ca6833..154060ec1a5b0 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -68,6 +68,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder + * The default metric used by fuzzy queries to determine a match is the Damerau-Levenshtein + * distance formula which supports transpositions. Setting transposition to false will + * switch to classic Levenshtein distance.
+ * If not set, Damerau-Levenshtein distance metric will be used. + */ + public QueryStringQueryBuilder fuzzyTranspositions(boolean fuzzyTranspositions) { + this.fuzzyTranspositions = fuzzyTranspositions; + return this; + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); @@ -706,6 +728,7 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep } builder.field(ESCAPE_FIELD.getPreferredName(), this.escape); builder.field(GENERATE_SYNONYMS_PHRASE_QUERY.getPreferredName(), autoGenerateSynonymsPhraseQuery); + builder.field(FUZZY_TRANSPOSITIONS_FIELD.getPreferredName(), fuzzyTranspositions); printBoostAndQueryName(builder); builder.endObject(); } @@ -739,6 +762,8 @@ public static QueryStringQueryBuilder fromXContent(XContentParser parser) throws String rewrite = null; Map fieldsAndWeights = null; boolean autoGenerateSynonymsPhraseQuery = true; + boolean fuzzyTranspositions = DEFAULT_FUZZY_TRANSPOSITIONS; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -813,6 +838,8 @@ public static QueryStringQueryBuilder fromXContent(XContentParser parser) throws queryName = parser.text(); } else if (GENERATE_SYNONYMS_PHRASE_QUERY.match(currentFieldName)) { autoGenerateSynonymsPhraseQuery = parser.booleanValue(); + } else if (FUZZY_TRANSPOSITIONS_FIELD.match(currentFieldName)) { + fuzzyTranspositions = parser.booleanValue(); } else if (AUTO_GENERATE_PHRASE_QUERIES_FIELD.match(currentFieldName)) { // ignore, deprecated setting } else if (LOWERCASE_EXPANDED_TERMS_FIELD.match(currentFieldName)) { @@ -866,6 +893,7 @@ public static QueryStringQueryBuilder fromXContent(XContentParser parser) throws queryStringQuery.boost(boost); queryStringQuery.queryName(queryName); queryStringQuery.autoGenerateSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery); + queryStringQuery.fuzzyTranspositions(fuzzyTranspositions); return queryStringQuery; } @@ -900,7 +928,8 @@ protected boolean doEquals(QueryStringQueryBuilder other) { Objects.equals(timeZone.getID(), other.timeZone.getID()) && Objects.equals(escape, other.escape) && Objects.equals(maxDeterminizedStates, other.maxDeterminizedStates) && - Objects.equals(autoGenerateSynonymsPhraseQuery, other.autoGenerateSynonymsPhraseQuery); + Objects.equals(autoGenerateSynonymsPhraseQuery, other.autoGenerateSynonymsPhraseQuery) && + Objects.equals(fuzzyTranspositions, other.fuzzyTranspositions); } @Override @@ -909,7 +938,8 @@ protected int doHashCode() { quoteFieldSuffix, allowLeadingWildcard, analyzeWildcard, enablePositionIncrements, fuzziness, fuzzyPrefixLength, fuzzyMaxExpansions, fuzzyRewrite, phraseSlop, type, tieBreaker, rewrite, minimumShouldMatch, lenient, - timeZone == null ? 0 : timeZone.getID(), escape, maxDeterminizedStates, autoGenerateSynonymsPhraseQuery); + timeZone == null ? 
0 : timeZone.getID(), escape, maxDeterminizedStates, autoGenerateSynonymsPhraseQuery, + fuzzyTranspositions); } @Override @@ -979,6 +1009,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { queryParser.setTimeZone(timeZone); queryParser.setMaxDeterminizedStates(maxDeterminizedStates); queryParser.setAutoGenerateMultiTermSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery); + queryParser.setFuzzyTranspositions(fuzzyTranspositions); Query query; try { diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index df0825b2672aa..3a9a0c3736b93 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.Query; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; @@ -89,6 +90,12 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder + * The default metric used by fuzzy queries to determine a match is the Damerau-Levenshtein + * distance formula which supports transpositions. Setting transposition to false will + * switch to classic Levenshtein distance.
+ If not set, Damerau-Levenshtein distance metric will be used. + */ + public SimpleQueryStringBuilder fuzzyTranspositions(boolean fuzzyTranspositions) { + this.settings.fuzzyTranspositions(fuzzyTranspositions); + return this; + } @Override protected Query doToQuery(QueryShardContext context) throws IOException { @@ -460,6 +509,9 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep builder.field(MINIMUM_SHOULD_MATCH_FIELD.getPreferredName(), minimumShouldMatch); } builder.field(GENERATE_SYNONYMS_PHRASE_QUERY.getPreferredName(), settings.autoGenerateSynonymsPhraseQuery()); + builder.field(FUZZY_PREFIX_LENGTH_FIELD.getPreferredName(), settings.fuzzyPrefixLength()); + builder.field(FUZZY_MAX_EXPANSIONS_FIELD.getPreferredName(), settings.fuzzyMaxExpansions()); + builder.field(FUZZY_TRANSPOSITIONS_FIELD.getPreferredName(), settings.fuzzyTranspositions()); printBoostAndQueryName(builder); builder.endObject(); } @@ -478,6 +530,9 @@ public static SimpleQueryStringBuilder fromXContent(XContentParser parser) throw boolean analyzeWildcard = SimpleQueryStringBuilder.DEFAULT_ANALYZE_WILDCARD; String quoteFieldSuffix = null; boolean autoGenerateSynonymsPhraseQuery = true; + int fuzzyPrefixLength = SimpleQueryStringBuilder.DEFAULT_FUZZY_PREFIX_LENGTH; + int fuzzyMaxExpansions = SimpleQueryStringBuilder.DEFAULT_FUZZY_MAX_EXPANSIONS; + boolean fuzzyTranspositions = SimpleQueryStringBuilder.DEFAULT_FUZZY_TRANSPOSITIONS; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -532,6 +587,12 @@ public static SimpleQueryStringBuilder fromXContent(XContentParser parser) throw // Ignore deprecated option } else if (GENERATE_SYNONYMS_PHRASE_QUERY.match(currentFieldName)) { autoGenerateSynonymsPhraseQuery = parser.booleanValue(); + } else if (FUZZY_PREFIX_LENGTH_FIELD.match(currentFieldName)) { + fuzzyPrefixLength = parser.intValue(); + } else if (FUZZY_MAX_EXPANSIONS_FIELD.match(currentFieldName)) { + fuzzyMaxExpansions = parser.intValue(); + } else if (FUZZY_TRANSPOSITIONS_FIELD.match(currentFieldName)) { + fuzzyTranspositions = parser.booleanValue(); } else { throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME + "] unsupported field [" + parser.currentName() + "]"); @@ -558,6 +619,9 @@ public static SimpleQueryStringBuilder fromXContent(XContentParser parser) throw } qb.analyzeWildcard(analyzeWildcard).boost(boost).quoteFieldSuffix(quoteFieldSuffix); qb.autoGenerateSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery); + qb.fuzzyPrefixLength(fuzzyPrefixLength); + qb.fuzzyMaxExpansions(fuzzyMaxExpansions); + qb.fuzzyTranspositions(fuzzyTranspositions); return qb; } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java index dcd1399bf5159..fe7d097638f31 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java @@ -43,8 +43,9 @@ import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; -import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType; +import org.elasticsearch.index.fielddata.SortingNumericDoubleValues; import org.elasticsearch.index.mapper.DateFieldMapper; +import 
org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.QueryShardContext; @@ -346,22 +347,23 @@ public boolean needsScores() { @Override protected NumericDoubleValues distance(LeafReaderContext context) { final MultiGeoPointValues geoPointValues = fieldData.load(context).getGeoPointValues(); - return mode.select(new MultiValueMode.UnsortedNumericDoubleValues() { - @Override - public int docValueCount() { - return geoPointValues.docValueCount(); - } - + return mode.select(new SortingNumericDoubleValues() { @Override public boolean advanceExact(int docId) throws IOException { - return geoPointValues.advanceExact(docId); - } - - @Override - public double nextValue() throws IOException { - GeoPoint other = geoPointValues.nextValue(); - return Math.max(0.0d, - distFunction.calculate(origin.lat(), origin.lon(), other.lat(), other.lon(), DistanceUnit.METERS) - offset); + if (geoPointValues.advanceExact(docId)) { + int n = geoPointValues.docValueCount(); + resize(n); + for (int i = 0; i < n; i++) { + GeoPoint other = geoPointValues.nextValue(); + double distance = distFunction.calculate( + origin.lat(), origin.lon(), other.lat(), other.lon(), DistanceUnit.METERS); + values[i] = Math.max(0.0d, distance - offset); + } + sort(); + return true; + } else { + return false; + } } }, 0.0); } @@ -427,20 +429,20 @@ public boolean needsScores() { @Override protected NumericDoubleValues distance(LeafReaderContext context) { final SortedNumericDoubleValues doubleValues = fieldData.load(context).getDoubleValues(); - return mode.select(new MultiValueMode.UnsortedNumericDoubleValues() { + return mode.select(new SortingNumericDoubleValues() { @Override - public int docValueCount() { - return doubleValues.docValueCount(); - } - - @Override - public boolean advanceExact(int doc) throws IOException { - return doubleValues.advanceExact(doc); - } - - @Override - public double nextValue() throws IOException { - return Math.max(0.0d, Math.abs(doubleValues.nextValue() - origin) - offset); + public boolean advanceExact(int docId) throws IOException { + if (doubleValues.advanceExact(docId)) { + int n = doubleValues.docValueCount(); + resize(n); + for (int i = 0; i < n; i++) { + values[i] = Math.max(0.0d, Math.abs(doubleValues.nextValue() - origin) - offset); + } + sort(); + return true; + } else { + return false; + } } }, 0.0); } @@ -542,10 +544,11 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE if (distance.advanceExact(docId) == false) { return Explanation.noMatch("No value for the distance"); } + double value = distance.doubleValue(); return Explanation.match( (float) score(docId, subQueryScore.getValue()), "Function for field " + getFieldName() + ":", - func.explainFunction(getDistanceString(ctx, docId), distance.doubleValue(), scale)); + func.explainFunction(getDistanceString(ctx, docId), value, scale)); } }; } diff --git a/core/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/core/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 339aeebf0a0ec..5f453d49ab5cc 100644 --- a/core/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -93,6 +93,7 @@ public class QueryStringQueryParser extends XQueryParser { private int fuzzyMaxExpansions = 
FuzzyQuery.defaultMaxExpansions; private MappedFieldType currentFieldType; private MultiTermQuery.RewriteMethod fuzzyRewriteMethod; + private boolean fuzzyTranspositions = FuzzyQuery.defaultTranspositions; /** * @param context The query shard context. @@ -236,6 +237,14 @@ public void setAutoGenerateMultiTermSynonymsPhraseQuery(boolean enable) { queryBuilder.setAutoGenerateSynonymsPhraseQuery(enable); } + /** + * @param fuzzyTranspositions Sets whether transpositions are supported in fuzzy queries. + * Defaults to {@link FuzzyQuery#defaultTranspositions}. + */ + public void setFuzzyTranspositions(boolean fuzzyTranspositions) { + this.fuzzyTranspositions = fuzzyTranspositions; + } + private Query applyBoost(Query q, Float boost) { if (boost != null && boost != 1f) { return new BoostQuery(q, boost); @@ -442,7 +451,7 @@ private Query getFuzzyQuerySingle(String field, String termStr, float minSimilar Analyzer normalizer = forceAnalyzer == null ? queryBuilder.context.getSearchAnalyzer(currentFieldType) : forceAnalyzer; BytesRef term = termStr == null ? null : normalizer.normalize(field, termStr); return currentFieldType.fuzzyQuery(term, Fuzziness.fromEdits((int) minSimilarity), - getFuzzyPrefixLength(), fuzzyMaxExpansions, FuzzyQuery.defaultTranspositions); + getFuzzyPrefixLength(), fuzzyMaxExpansions, fuzzyTranspositions); } catch (RuntimeException e) { if (lenient) { return newLenientFieldQuery(field, e); @@ -455,7 +464,7 @@ private Query getFuzzyQuerySingle(String field, String termStr, float minSimilar protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) { int numEdits = Fuzziness.build(minimumSimilarity).asDistance(term.text()); FuzzyQuery query = new FuzzyQuery(term, numEdits, prefixLength, - fuzzyMaxExpansions, FuzzyQuery.defaultTranspositions); + fuzzyMaxExpansions, fuzzyTranspositions); QueryParsers.setRewriteMethod(query, fuzzyRewriteMethod); return query; } diff --git a/core/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java b/core/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java index fc7f1349e5cbf..9f91b16359287 100644 --- a/core/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java @@ -28,7 +28,6 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; -import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; @@ -132,8 +131,8 @@ public Query newFuzzyQuery(String text, int fuzziness) { } try { final BytesRef term = getAnalyzer(ft).normalize(fieldName, text); - Query query = ft.fuzzyQuery(term, Fuzziness.fromEdits(fuzziness), FuzzyQuery.defaultPrefixLength, - FuzzyQuery.defaultMaxExpansions, FuzzyQuery.defaultTranspositions); + Query query = ft.fuzzyQuery(term, Fuzziness.fromEdits(fuzziness), settings.fuzzyPrefixLength, + settings.fuzzyMaxExpansions, settings.fuzzyTranspositions); disjuncts.add(wrapWithBoost(query, entry.getValue())); } catch (RuntimeException e) { rethrowUnlessLenient(e); @@ -293,6 +292,12 @@ public static class Settings { private String quoteFieldSuffix = null; /** Whether phrase queries should be automatically generated for multi terms synonyms. 
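Taken together, a hedged usage sketch of the three new fuzzy knobs on SimpleQueryStringBuilder; the field name and query text are invented:

SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder("caht~1").field("body");
qb.fuzzyPrefixLength(1);       // require the first character to match exactly
qb.fuzzyMaxExpansions(10);     // cap how many terms the fuzzy term may expand to
qb.fuzzyTranspositions(false); // fall back to classic Levenshtein distance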
*/ private boolean autoGenerateSynonymsPhraseQuery = true; + /** Prefix length in fuzzy queries.*/ + private int fuzzyPrefixLength = SimpleQueryStringBuilder.DEFAULT_FUZZY_PREFIX_LENGTH; + /** The number of terms fuzzy queries will expand to.*/ + private int fuzzyMaxExpansions = SimpleQueryStringBuilder.DEFAULT_FUZZY_MAX_EXPANSIONS; + /** Whether transpositions are supported in fuzzy queries.*/ + private boolean fuzzyTranspositions = SimpleQueryStringBuilder.DEFAULT_FUZZY_TRANSPOSITIONS; /** * Generates default {@link Settings} object (uses ROOT locale, does @@ -306,6 +311,9 @@ public Settings(Settings other) { this.analyzeWildcard = other.analyzeWildcard; this.quoteFieldSuffix = other.quoteFieldSuffix; this.autoGenerateSynonymsPhraseQuery = other.autoGenerateSynonymsPhraseQuery; + this.fuzzyPrefixLength = other.fuzzyPrefixLength; + this.fuzzyMaxExpansions = other.fuzzyMaxExpansions; + this.fuzzyTranspositions = other.fuzzyTranspositions; } /** Specifies whether to use lenient parsing, defaults to false. */ @@ -355,9 +363,34 @@ public boolean autoGenerateSynonymsPhraseQuery() { return autoGenerateSynonymsPhraseQuery; } + public int fuzzyPrefixLength() { + return fuzzyPrefixLength; + } + + public void fuzzyPrefixLength(int fuzzyPrefixLength) { + this.fuzzyPrefixLength = fuzzyPrefixLength; + } + + public int fuzzyMaxExpansions() { + return fuzzyMaxExpansions; + } + + public void fuzzyMaxExpansions(int fuzzyMaxExpansions) { + this.fuzzyMaxExpansions = fuzzyMaxExpansions; + } + + public boolean fuzzyTranspositions() { + return fuzzyTranspositions; + } + + public void fuzzyTranspositions(boolean fuzzyTranspositions) { + this.fuzzyTranspositions = fuzzyTranspositions; + } + @Override public int hashCode() { - return Objects.hash(lenient, analyzeWildcard, quoteFieldSuffix, autoGenerateSynonymsPhraseQuery); + return Objects.hash(lenient, analyzeWildcard, quoteFieldSuffix, autoGenerateSynonymsPhraseQuery, + fuzzyPrefixLength, fuzzyMaxExpansions, fuzzyTranspositions); } @Override @@ -372,7 +405,10 @@ public boolean equals(Object obj) { return Objects.equals(lenient, other.lenient) && Objects.equals(analyzeWildcard, other.analyzeWildcard) && Objects.equals(quoteFieldSuffix, other.quoteFieldSuffix) && - Objects.equals(autoGenerateSynonymsPhraseQuery, other.autoGenerateSynonymsPhraseQuery); + Objects.equals(autoGenerateSynonymsPhraseQuery, other.autoGenerateSynonymsPhraseQuery) && + Objects.equals(fuzzyPrefixLength, other.fuzzyPrefixLength) && + Objects.equals(fuzzyMaxExpansions, other.fuzzyMaxExpansions) && + Objects.equals(fuzzyTranspositions, other.fuzzyTranspositions); } } } diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index ee87aa5e630bd..2d9e8e78b7768 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.analysis.ArabicAnalyzerProvider; import org.elasticsearch.index.analysis.ArmenianAnalyzerProvider; import org.elasticsearch.index.analysis.BasqueAnalyzerProvider; +import org.elasticsearch.index.analysis.BengaliAnalyzerProvider; import org.elasticsearch.index.analysis.BrazilianAnalyzerProvider; import org.elasticsearch.index.analysis.BulgarianAnalyzerProvider; import org.elasticsearch.index.analysis.CatalanAnalyzerProvider; @@ -270,6 +271,7 @@ private NamedRegistry>> setupAnalyzers(List analyzers.register("arabic", 
ArabicAnalyzerProvider::new); analyzers.register("armenian", ArmenianAnalyzerProvider::new); analyzers.register("basque", BasqueAnalyzerProvider::new); + analyzers.register("bengali", BengaliAnalyzerProvider::new); analyzers.register("brazilian", BrazilianAnalyzerProvider::new); analyzers.register("bulgarian", BulgarianAnalyzerProvider::new); analyzers.register("catalan", CatalanAnalyzerProvider::new); diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java index 6c4a3cc2578db..3c286f7dd5ec5 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java @@ -22,6 +22,7 @@ import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ar.ArabicAnalyzer; import org.apache.lucene.analysis.bg.BulgarianAnalyzer; +import org.apache.lucene.analysis.bn.BengaliAnalyzer; import org.apache.lucene.analysis.br.BrazilianAnalyzer; import org.apache.lucene.analysis.ca.CatalanAnalyzer; import org.apache.lucene.analysis.cjk.CJKAnalyzer; @@ -183,6 +184,15 @@ protected Analyzer create(Version version) { } }, + BENGALI { + @Override + protected Analyzer create(Version version) { + Analyzer a = new BengaliAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + } + }, + BRAZILIAN { @Override protected Analyzer create(Version version) { diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java index 43ef51658b726..f9423f1b13cd1 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java @@ -36,8 +36,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; public class OsProbe { @@ -382,12 +380,70 @@ List readSysFsCgroupCpuAcctCpuStat(final String controlGroup) throws IOE } /** - * Checks if cgroup stats are available by checking for the existence of {@code /proc/self/cgroup}, {@code /sys/fs/cgroup/cpu}, and - * {@code /sys/fs/cgroup/cpuacct}. + * The maximum amount of user memory (including file cache). + * If there is no limit then some Linux versions return the maximum value that can be stored in an + * unsigned 64 bit number, and this will overflow a long, hence the result type is String. + * (The alternative would have been BigInteger but then it would not be possible to index + * the OS stats document into Elasticsearch without losing information, as BigInteger is + * not a supported Elasticsearch type.) + * + * @param controlGroup the control group for the Elasticsearch process for the {@code memory} subsystem + * @return the maximum amount of user memory (including file cache) + * @throws IOException if an I/O exception occurs reading {@code memory.limit_in_bytes} for the control group + */ + private String getCgroupMemoryLimitInBytes(final String controlGroup) throws IOException { + return readSysFsCgroupMemoryLimitInBytes(controlGroup); + } + + /** + * Returns the line from {@code memory.limit_in_bytes} for the control group to which the Elasticsearch process belongs for the + * {@code memory} subsystem. This line represents the maximum amount of user memory (including file cache). 
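The overflow concern is easy to demonstrate; the unlimited sentinel on such kernels is 2^64 - 1, which has no long representation (the value below is illustrative):

import java.math.BigInteger;

String reported = "18446744073709551615"; // 2^64 - 1, as some kernels report "no limit"
BigInteger limit = new BigInteger(reported);
assert limit.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0; // true
// Long.parseLong(reported) would throw NumberFormatException, hence the String result type.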
+ * + * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem + * @return the line from {@code memory.limit_in_bytes} + * @throws IOException if an I/O exception occurs reading {@code memory.limit_in_bytes} for the control group + */ + @SuppressForbidden(reason = "access /sys/fs/cgroup/memory") + String readSysFsCgroupMemoryLimitInBytes(final String controlGroup) throws IOException { + return readSingleLine(PathUtils.get("/sys/fs/cgroup/memory", controlGroup, "memory.limit_in_bytes")); + } + + /** + * The total current memory usage by processes in the cgroup (in bytes). + * If there is no limit then some Linux versions return the maximum value that can be stored in an + * unsigned 64 bit number, and this will overflow a long, hence the result type is String. + * (The alternative would have been BigInteger but then it would not be possible to index + * the OS stats document into Elasticsearch without losing information, as BigInteger is + * not a supported Elasticsearch type.) + * + * @param controlGroup the control group for the Elasticsearch process for the {@code memory} subsystem + * @return the total current memory usage by processes in the cgroup (in bytes) + * @throws IOException if an I/O exception occurs reading {@code memory.usage_in_bytes} for the control group + */ + private String getCgroupMemoryUsageInBytes(final String controlGroup) throws IOException { + return readSysFsCgroupMemoryUsageInBytes(controlGroup); + } + + /** + * Returns the line from {@code memory.usage_in_bytes} for the control group to which the Elasticsearch process belongs for the + * {@code memory} subsystem. This line represents the total current memory usage by processes in the cgroup (in bytes). + * + * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem + * @return the line from {@code memory.usage_in_bytes} + * @throws IOException if an I/O exception occurs reading {@code memory.usage_in_bytes} for the control group + */ + @SuppressForbidden(reason = "access /sys/fs/cgroup/memory") + String readSysFsCgroupMemoryUsageInBytes(final String controlGroup) throws IOException { + return readSingleLine(PathUtils.get("/sys/fs/cgroup/memory", controlGroup, "memory.usage_in_bytes")); + } + + /** + * Checks if cgroup stats are available by checking for the existence of {@code /proc/self/cgroup}, {@code /sys/fs/cgroup/cpu}, + * {@code /sys/fs/cgroup/cpuacct} and {@code /sys/fs/cgroup/memory}. 
* * @return {@code true} if the stats are available, otherwise {@code false} */ - @SuppressForbidden(reason = "access /proc/self/cgroup, /sys/fs/cgroup/cpu, and /sys/fs/cgroup/cpuacct") + @SuppressForbidden(reason = "access /proc/self/cgroup, /sys/fs/cgroup/cpu, /sys/fs/cgroup/cpuacct and /sys/fs/cgroup/memory") boolean areCgroupStatsAvailable() { if (!Files.exists(PathUtils.get("/proc/self/cgroup"))) { return false; @@ -398,6 +454,9 @@ boolean areCgroupStatsAvailable() { if (!Files.exists(PathUtils.get("/sys/fs/cgroup/cpuacct"))) { return false; } + if (!Files.exists(PathUtils.get("/sys/fs/cgroup/memory"))) { + return false; + } return true; } @@ -424,13 +483,21 @@ private OsStats.Cgroup getCgroup() { final long cgroupCpuAcctCpuCfsQuotaMicros = getCgroupCpuAcctCpuCfsQuotaMicros(cpuControlGroup); final OsStats.Cgroup.CpuStat cpuStat = getCgroupCpuAcctCpuStat(cpuControlGroup); + final String memoryControlGroup = controllerMap.get("memory"); + assert memoryControlGroup != null; + final String cgroupMemoryLimitInBytes = getCgroupMemoryLimitInBytes(memoryControlGroup); + final String cgroupMemoryUsageInBytes = getCgroupMemoryUsageInBytes(memoryControlGroup); + return new OsStats.Cgroup( cpuAcctControlGroup, cgroupCpuAcctUsageNanos, cpuControlGroup, cgroupCpuAcctCpuCfsPeriodMicros, cgroupCpuAcctCpuCfsQuotaMicros, - cpuStat); + cpuStat, + memoryControlGroup, + cgroupMemoryLimitInBytes, + cgroupMemoryUsageInBytes); } } catch (final IOException e) { logger.debug("error reading control group stats", e); diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java b/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java index dfca123d0fa8f..60502679c2131 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java @@ -294,6 +294,10 @@ public static class Cgroup implements Writeable, ToXContentFragment { private final long cpuCfsPeriodMicros; private final long cpuCfsQuotaMicros; private final CpuStat cpuStat; + // These will be null for nodes running versions prior to 6.1.0 + private final String memoryControlGroup; + private final String memoryLimitInBytes; + private final String memoryUsageInBytes; /** * The control group for the {@code cpuacct} subsystem. @@ -355,19 +359,57 @@ public CpuStat getCpuStat() { return cpuStat; } + /** + * The control group for the {@code memory} subsystem. + * + * @return the control group + */ + public String getMemoryControlGroup() { + return memoryControlGroup; + } + + /** + * The maximum amount of user memory (including file cache). + * This is stored as a String because the value can be too big to fit in a + * long. (The alternative would have been BigInteger but then + * it would not be possible to index the OS stats document into Elasticsearch without + * losing information, as BigInteger is not a supported Elasticsearch type.) + * + * @return the maximum amount of user memory (including file cache). + */ + public String getMemoryLimitInBytes() { + return memoryLimitInBytes; + } + + /** + * The total current memory usage by processes in the cgroup (in bytes). + * This is stored as a String for consistency with memoryLimitInBytes. + * + * @return the total current memory usage by processes in the cgroup (in bytes). 
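Once these fields are populated, the cgroup section rendered by the toXContent changes further down in OsStats would look roughly like this (values are illustrative):

"cgroup": {
  ...,
  "memory": {
    "control_group": "/",
    "limit_in_bytes": "18446744073709551615",
    "usage_in_bytes": "4796416"
  }
}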
+ */ + public String getMemoryUsageInBytes() { + return memoryUsageInBytes; + } + public Cgroup( final String cpuAcctControlGroup, final long cpuAcctUsageNanos, final String cpuControlGroup, final long cpuCfsPeriodMicros, final long cpuCfsQuotaMicros, - final CpuStat cpuStat) { + final CpuStat cpuStat, + final String memoryControlGroup, + final String memoryLimitInBytes, + final String memoryUsageInBytes) { this.cpuAcctControlGroup = Objects.requireNonNull(cpuAcctControlGroup); this.cpuAcctUsageNanos = cpuAcctUsageNanos; this.cpuControlGroup = Objects.requireNonNull(cpuControlGroup); this.cpuCfsPeriodMicros = cpuCfsPeriodMicros; this.cpuCfsQuotaMicros = cpuCfsQuotaMicros; this.cpuStat = Objects.requireNonNull(cpuStat); + this.memoryControlGroup = memoryControlGroup; + this.memoryLimitInBytes = memoryLimitInBytes; + this.memoryUsageInBytes = memoryUsageInBytes; } Cgroup(final StreamInput in) throws IOException { @@ -377,6 +419,15 @@ public Cgroup( cpuCfsPeriodMicros = in.readLong(); cpuCfsQuotaMicros = in.readLong(); cpuStat = new CpuStat(in); + if (in.getVersion().onOrAfter(Version.V_6_1_0)) { + memoryControlGroup = in.readOptionalString(); + memoryLimitInBytes = in.readOptionalString(); + memoryUsageInBytes = in.readOptionalString(); + } else { + memoryControlGroup = null; + memoryLimitInBytes = null; + memoryUsageInBytes = null; + } } @Override @@ -387,6 +438,11 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeLong(cpuCfsPeriodMicros); out.writeLong(cpuCfsQuotaMicros); cpuStat.writeTo(out); + if (out.getVersion().onOrAfter(Version.V_6_1_0)) { + out.writeOptionalString(memoryControlGroup); + out.writeOptionalString(memoryLimitInBytes); + out.writeOptionalString(memoryUsageInBytes); + } } @Override @@ -407,6 +463,19 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa cpuStat.toXContent(builder, params); } builder.endObject(); + if (memoryControlGroup != null) { + builder.startObject("memory"); + { + builder.field("control_group", memoryControlGroup); + if (memoryLimitInBytes != null) { + builder.field("limit_in_bytes", memoryLimitInBytes); + } + if (memoryUsageInBytes != null) { + builder.field("usage_in_bytes", memoryUsageInBytes); + } + } + builder.endObject(); + } } builder.endObject(); return builder; diff --git a/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java index 93c5a18222c8e..a2c7663ec9e15 100644 --- a/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java @@ -134,7 +134,7 @@ static void initializeSettings(final Settings.Builder output, final Settings inp private static void finalizeSettings(Settings.Builder output, Terminal terminal) { // allow to force set properties based on configuration of the settings provided List forcedSettings = new ArrayList<>(); - for (String setting : output.internalMap().keySet()) { + for (String setting : output.keys()) { if (setting.startsWith("force.")) { forcedSettings.add(setting); } @@ -156,13 +156,13 @@ private static void finalizeSettings(Settings.Builder output, Terminal terminal) private static void replacePromptPlaceholders(Settings.Builder settings, Terminal terminal) { List secretToPrompt = new ArrayList<>(); List textToPrompt = new ArrayList<>(); - for (Map.Entry entry : settings.internalMap().entrySet()) { - switch (entry.getValue()) { + for (String key : settings.keys()) { + switch 
(settings.get(key)) { case SECRET_PROMPT_VALUE: - secretToPrompt.add(entry.getKey()); + secretToPrompt.add(key); break; case TEXT_PROMPT_VALUE: - textToPrompt.add(entry.getKey()); + textToPrompt.add(key); break; } } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index a58660120c129..a41f3bb4d3ede 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -186,7 +186,7 @@ public Settings updatedSettings() { final Settings.Builder builder = Settings.builder(); for (Tuple plugin : plugins) { Settings settings = plugin.v2().additionalSettings(); - for (String setting : settings.getAsMap().keySet()) { + for (String setting : settings.keySet()) { String oldPlugin = foundSettings.put(setting, plugin.v1().getName()); if (oldPlugin != null) { throw new IllegalArgumentException("Cannot have additional setting [" + setting + "] " + diff --git a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java index 1195644e328a0..2d85c379f54fd 100644 --- a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -104,16 +104,6 @@ protected double pick(SortedNumericDoubleValues values, double missingValue, Doc } return totalCount > 0 ? totalValue : missingValue; } - - @Override - protected double pick(UnsortedNumericDoubleValues values) throws IOException { - final int count = values.docValueCount(); - double total = 0; - for (int index = 0; index < count; ++index) { - total += values.nextValue(); - } - return total; - } }, /** @@ -177,16 +167,6 @@ protected double pick(SortedNumericDoubleValues values, double missingValue, Doc } return totalValue/totalCount; } - - @Override - protected double pick(UnsortedNumericDoubleValues values) throws IOException { - final int count = values.docValueCount(); - double total = 0; - for (int index = 0; index < count; ++index) { - total += values.nextValue(); - } - return total/count; - } }, /** @@ -303,16 +283,6 @@ protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc } return hasValue ? ord : -1; } - - @Override - protected double pick(UnsortedNumericDoubleValues values) throws IOException { - int count = values.docValueCount(); - double min = Double.POSITIVE_INFINITY; - for (int index = 0; index < count; ++index) { - min = Math.min(values.nextValue(), min); - } - return min; - } }, /** @@ -419,16 +389,6 @@ protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc } return ord; } - - @Override - protected double pick(UnsortedNumericDoubleValues values) throws IOException { - int count = values.docValueCount(); - double max = Double.NEGATIVE_INFINITY; - for (int index = 0; index < count; ++index) { - max = Math.max(values.nextValue(), max); - } - return max; - } }; /** @@ -905,43 +865,6 @@ protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc throw new IllegalArgumentException("Unsupported sort mode: " + this); } - /** - * Return a {@link NumericDoubleValues} instance that can be used to sort documents - * with this mode and the provided values. When a document has no value, - * missingValue is returned. 
- * - * Allowed Modes: SUM, AVG, MIN, MAX - */ - public NumericDoubleValues select(final UnsortedNumericDoubleValues values, final double missingValue) { - return new NumericDoubleValues() { - private boolean hasValue; - - @Override - public boolean advanceExact(int doc) throws IOException { - hasValue = values.advanceExact(doc); - return true; - } - @Override - public double doubleValue() throws IOException { - return hasValue ? pick(values) : missingValue; - } - }; - } - - protected double pick(UnsortedNumericDoubleValues values) throws IOException { - throw new IllegalArgumentException("Unsupported sort mode: " + this); - } - - /** - * Interface allowing custom value generators to be used in MultiValueMode. - */ - // TODO: why do we need it??? - public interface UnsortedNumericDoubleValues { - boolean advanceExact(int doc) throws IOException; - int docValueCount() throws IOException; - double nextValue() throws IOException; - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeEnum(this); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index 21cd2347cc61b..aa94bb762596a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -321,8 +321,9 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { do { final IteratorAndCurrent top = pq.top(); - if (top.current.key != key) { - // the key changes, reduce what we already buffered and reset the buffer for current buckets + if (Double.compare(top.current.key, key) != 0) { + // The key changes, reduce what we already buffered and reset the buffer for current buckets. + // Using Double.compare instead of != to handle NaN correctly. 
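The NaN subtlety the new comment refers to comes straight from Java's floating-point semantics:

double key = Double.NaN;
boolean notEqual = key != key;      // true: NaN compares unequal to itself
int cmp = Double.compare(key, key); // 0: Double.compare treats NaN as equal to itself

So with the old != check, two buckets carrying the same NaN key would never be treated as equal, while Double.compare orders NaN consistently.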
final Bucket reduced = currentBuckets.get(0).reduce(currentBuckets, reduceContext); if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { reducedBuckets.add(reduced); @@ -335,7 +336,7 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { if (top.iterator.hasNext()) { final Bucket next = top.iterator.next(); - assert next.key > top.current.key : "shards must return data sorted by key"; + assert Double.compare(next.key, top.current.key) > 0 : "shards must return data sorted by key"; top.current = next; pq.updateTop(); } else { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java index 2fb8f6789cea7..d3b1da7c9376e 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java @@ -25,7 +25,6 @@ import org.apache.lucene.search.ConjunctionDISI; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TopDocs; @@ -126,15 +125,15 @@ public static void intersect(Weight weight, Weight innerHitQueryWeight, Collecto if (scorerSupplier == null) { return; } - // use random access since this scorer will be consumed on a minority of documents - Scorer scorer = scorerSupplier.get(true); + // use low leadCost since this scorer will be consumed on a minority of documents + Scorer scorer = scorerSupplier.get(0); ScorerSupplier innerHitQueryScorerSupplier = innerHitQueryWeight.scorerSupplier(ctx); if (innerHitQueryScorerSupplier == null) { return; } - // use random access since this scorer will be consumed on a minority of documents - Scorer innerHitQueryScorer = innerHitQueryScorerSupplier.get(true); + // use low leadCost since this scorer will be consumed on a minority of documents + Scorer innerHitQueryScorer = innerHitQueryScorerSupplier.get(0); final LeafCollector leafCollector; try { diff --git a/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java b/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java index 7cb50b292194d..bd5fd5e23a2f0 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java +++ b/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java @@ -54,7 +54,7 @@ public Scorer scorer(LeafReaderContext context) throws IOException { if (supplier == null) { return null; } - return supplier.get(Long.MAX_VALUE); + return supplier.get(Long.MAX_VALUE); } @Override @@ -75,10 +75,10 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti return new ScorerSupplier() { @Override - public Scorer get(boolean randomAccess) throws IOException { + public Scorer get(long leadCost) throws IOException { timer.start(); try { - return new ProfileScorer(weight, subQueryScorerSupplier.get(randomAccess), profile); + return new ProfileScorer(weight, subQueryScorerSupplier.get(leadCost), profile); } finally { timer.stop(); } diff --git a/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java index 55cf387ba42f7..389b81ffcbad4 100644 --- 
a/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -131,21 +130,25 @@ public static FieldDoc buildFieldDoc(SortAndFormats sort, Object[] values) { return new FieldDoc(Integer.MAX_VALUE, 0, fieldValues); } - private static SortField.Type extractSortType(SortField sortField) { - if (sortField instanceof SortedSetSortField) { + /** + * Returns the inner {@link SortField.Type} expected for this sort field. + */ + static SortField.Type extractSortType(SortField sortField) { + if (sortField.getComparatorSource() instanceof IndexFieldData.XFieldComparatorSource) { + return ((IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource()).reducedType(); + } else if (sortField instanceof SortedSetSortField) { return SortField.Type.STRING; } else if (sortField instanceof SortedNumericSortField) { return ((SortedNumericSortField) sortField).getNumericType(); + } else if ("LatLonPointSortField".equals(sortField.getClass().getSimpleName())) { + // for geo distance sorting + return SortField.Type.DOUBLE; } else { return sortField.getType(); } } - private static Object convertValueFromSortField(Object value, SortField sortField, DocValueFormat format) { - if (sortField.getComparatorSource() instanceof IndexFieldData.XFieldComparatorSource) { - IndexFieldData.XFieldComparatorSource cmpSource = (IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource(); - return convertValueFromSortType(sortField.getField(), cmpSource.reducedType(), value, format); - } + static Object convertValueFromSortField(Object value, SortField sortField, DocValueFormat format) { SortField.Type sortType = extractSortType(sortField); return convertValueFromSortType(sortField.getField(), sortType, value, format); } diff --git a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java index 0fb7c372b00df..6bf731f2936d9 100644 --- a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -183,7 +183,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i key -> intSetting(key, -1, -1, Setting.Property.NodeScope)); private static final long NINETY_PER_HEAP_SIZE = (long) (JvmInfo.jvmInfo().getMem().getHeapMax().getBytes() * 0.9); - private static final int PING_DATA_SIZE = -1; + public static final int PING_DATA_SIZE = -1; private final CircuitBreakerService circuitBreakerService; // package visibility for tests protected final ScheduledPing scheduledPing; @@ -441,7 +441,8 @@ public Channel channel(TransportRequestOptions.Type type) { public void close() throws IOException { if (closed.compareAndSet(false, true)) { try { - closeChannels(Arrays.stream(channels).filter(Objects::nonNull).collect(Collectors.toList()), false, true); + closeChannels(Arrays.stream(channels).filter(Objects::nonNull).collect(Collectors.toList()), false, + lifecycle.stopped()); } finally { transportService.onConnectionClosed(this); } diff --git 
a/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java b/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java index e08d89d181f71..8c48f08874350 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.transport; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -27,6 +26,8 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.function.Function; +import java.util.function.Supplier; /** * TransportActionProxy allows an arbitrary action to be executed on a defined target node while the initial request is sent to a second @@ -41,19 +42,21 @@ private static class ProxyRequestHandler<T extends ProxyRequest> implements TransportRequestHandler<T> { private final TransportService service; private final String action; - private final Supplier<TransportResponse> responseFactory; + private final Function<TransportRequest, Supplier<TransportResponse>> responseFunction; - ProxyRequestHandler(TransportService service, String action, Supplier<TransportResponse> responseFactory) { + ProxyRequestHandler(TransportService service, String action, Function<TransportRequest, Supplier<TransportResponse>> responseFunction) { this.service = service; this.action = action; - this.responseFactory = responseFactory; + this.responseFunction = responseFunction; } @Override public void messageReceived(T request, TransportChannel channel) throws Exception { DiscoveryNode targetNode = request.targetNode; TransportRequest wrappedRequest = request.wrapped; - service.sendRequest(targetNode, action, wrappedRequest, new ProxyResponseHandler<>(channel, responseFactory)); + service.sendRequest(targetNode, action, wrappedRequest, + new ProxyResponseHandler<>(channel, responseFunction.apply(wrappedRequest))); } } @@ -126,12 +129,24 @@ public void writeTo(StreamOutput out) throws IOException { } /** - * Registers a proxy request handler that allows to forward requests for the given action to another node. + * Registers a proxy request handler that allows forwarding requests for the given action to another node. To be used when the + * response type changes based on the incoming request (quite rare) + */ + public static void registerProxyAction(TransportService service, String action, + Function<TransportRequest, Supplier<TransportResponse>> responseFunction) { + RequestHandlerRegistry requestHandler = service.getRequestHandler(action); + service.registerRequestHandler(getProxyAction(action), () -> new ProxyRequest(requestHandler::newRequest), ThreadPool.Names.SAME, + true, false, new ProxyRequestHandler<>(service, action, responseFunction)); + } + + /** + * Registers a proxy request handler that allows forwarding requests for the given action to another node. To be used when the + * response type is always the same (most cases).
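A hypothetical usage sketch contrasting the two registration flavors added above (the action name, MyRequest, MyResponse and MySpecialResponse are invented for illustration; only the registerProxyAction signatures come from the patch):

    // Common case: the proxied action always answers with the same response type.
    TransportActionProxy.registerProxyAction(transportService, "internal:example/action",
            MyResponse::new);

    // Rare case introduced by this change: the response type depends on the
    // incoming request, so a Function from request to response supplier is
    // registered instead of a plain Supplier.
    TransportActionProxy.registerProxyAction(transportService, "internal:example/action",
            request -> ((MyRequest) request).isSpecial() ? MySpecialResponse::new : MyResponse::new);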
*/ public static void registerProxyAction(TransportService service, String action, Supplier<TransportResponse> responseSupplier) { RequestHandlerRegistry requestHandler = service.getRequestHandler(action); service.registerRequestHandler(getProxyAction(action), () -> new ProxyRequest(requestHandler::newRequest), ThreadPool.Names.SAME, - true, false, new ProxyRequestHandler<>(service, action, responseSupplier)); + true, false, new ProxyRequestHandler<>(service, action, request -> responseSupplier)); } private static final String PROXY_ACTION_PREFIX = "internal:transport/proxy/"; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportStatus.java b/core/src/main/java/org/elasticsearch/transport/TransportStatus.java index 39472cbe3cd0d..2f5f6d6bd9bb5 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportStatus.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportStatus.java @@ -19,7 +19,7 @@ package org.elasticsearch.transport; -final class TransportStatus { +public final class TransportStatus { private static final byte STATUS_REQRES = 1 << 0; private static final byte STATUS_ERROR = 1 << 1; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 001af2b7f4c85..deb9a88e4fd01 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.1.jar}" { //// Very special jar permissions: //// These are dangerous permissions that we don't want to grant to everything. -grant codeBase "${codebase.lucene-core-7.0.0.jar}" { +grant codeBase "${codebase.lucene-core-7.1.0-snapshot-f33ed4ba12a.jar}" { // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die) // java 8 package permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; @@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-core-7.0.0.jar}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; }; -grant codeBase "${codebase.lucene-misc-7.0.0.jar}" { +grant codeBase "${codebase.lucene-misc-7.1.0-snapshot-f33ed4ba12a.jar}" { // needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper permission java.nio.file.LinkPermission "hard"; }; @@ -129,4 +129,6 @@ grant { permission java.io.FilePermission "/sys/fs/cgroup/cpu/-", "read"; permission java.io.FilePermission "/sys/fs/cgroup/cpuacct", "read"; permission java.io.FilePermission "/sys/fs/cgroup/cpuacct/-", "read"; + permission java.io.FilePermission "/sys/fs/cgroup/memory", "read"; + permission java.io.FilePermission "/sys/fs/cgroup/memory/-", "read"; }; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index b8d868b3cfd61..c5a9a2b00d312 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -33,7 +33,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-7.0.0.jar}" { +grant codeBase "${codebase.lucene-test-framework-7.1.0-snapshot-f33ed4ba12a.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; // needed for testing hardlinks in StoreRecoveryTests since
we install MockFS diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index 29a7918a70ebb..338ffe06fb8ca 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -96,6 +96,12 @@ public void testSerialization() throws IOException { assertEquals( nodeStats.getOs().getCgroup().getCpuStat().getTimeThrottledNanos(), deserializedNodeStats.getOs().getCgroup().getCpuStat().getTimeThrottledNanos()); + assertEquals( + nodeStats.getOs().getCgroup().getMemoryLimitInBytes(), + deserializedNodeStats.getOs().getCgroup().getMemoryLimitInBytes()); + assertEquals( + nodeStats.getOs().getCgroup().getMemoryUsageInBytes(), + deserializedNodeStats.getOs().getCgroup().getMemoryUsageInBytes()); assertArrayEquals(nodeStats.getOs().getCpu().getLoadAverage(), deserializedNodeStats.getOs().getCpu().getLoadAverage(), 0); } @@ -294,7 +300,10 @@ private static NodeStats createNodeStats() { randomAlphaOfLength(8), randomNonNegativeLong(), randomNonNegativeLong(), - new OsStats.Cgroup.CpuStat(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()))); + new OsStats.Cgroup.CpuStat(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()), + randomAlphaOfLength(8), + Long.toString(randomNonNegativeLong()), + Long.toString(randomNonNegativeLong()))); } ProcessStats processStats = frequently() ? new ProcessStats( diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java new file mode 100644 index 0000000000000..481bf5579e2ab --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java @@ -0,0 +1,135 @@ +package org.elasticsearch.action.admin.cluster.snapshots.status; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
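A note on the cgroup memory fields exercised in the NodeStatsTests hunk above: the new limit and usage values are carried as Strings rather than longs (note the Long.toString(...) arguments to the OsStats.Cgroup constructor), presumably because /sys/fs/cgroup/memory can report an effectively unlimited value that is risky to parse eagerly into a signed long. A hedged sketch of such defensive parsing (the file path is the one granted read permission earlier in this patch; the class and approach are illustrative, not the patch's implementation):

    import java.math.BigInteger;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class CgroupMemoryLimitDemo {
        public static void main(String[] args) throws Exception {
            // On cgroup v1, an unconstrained group reports a page-aligned value
            // close to 2^63 here, e.g. 9223372036854771712.
            String raw = new String(Files.readAllBytes(
                    Paths.get("/sys/fs/cgroup/memory/memory.limit_in_bytes"))).trim();
            // BigInteger parses safely however large the reported limit is.
            BigInteger limit = new BigInteger(raw);
            System.out.println("cgroup memory limit: " + limit + " bytes");
        }
    }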
+ */ + +import org.elasticsearch.cluster.SnapshotsInProgress; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.List; + + +public class SnapshotStatusTests extends ESTestCase { + + + public void testToString() throws Exception { + SnapshotsInProgress.State state = randomFrom(SnapshotsInProgress.State.values()); + String uuid = UUIDs.randomBase64UUID(); + SnapshotId id = new SnapshotId("test-snap", uuid); + Snapshot snapshot = new Snapshot("test-repo", id); + + String indexName = randomAlphaOfLengthBetween(3, 50); + int shardId = randomInt(); + ShardId testShardId = ShardId.fromString("[" + indexName + "][" + shardId + "]"); + SnapshotIndexShardStage shardStage = randomFrom(SnapshotIndexShardStage.values()); + SnapshotIndexShardStatus snapshotIndexShardStatus = new SnapshotIndexShardStatus(testShardId, shardStage); + List snapshotIndexShardStatuses = new ArrayList<>(); + snapshotIndexShardStatuses.add(snapshotIndexShardStatus); + SnapshotStatus status = new SnapshotStatus(snapshot, state, snapshotIndexShardStatuses); + + int initializingShards = 0; + int startedShards = 0; + int finalizingShards = 0; + int doneShards = 0; + int failedShards = 0; + int totalShards = 1; + + switch (shardStage) { + case INIT: + initializingShards++; + break; + case STARTED: + startedShards++; + break; + case FINALIZE: + finalizingShards++; + break; + case DONE: + doneShards++; + break; + case FAILURE: + failedShards++; + break; + default: + break; + } + + String expected = "{\n" + + " \"snapshot\" : \"test-snap\",\n" + + " \"repository\" : \"test-repo\",\n" + + " \"uuid\" : \"" + uuid + "\",\n" + + " \"state\" : \"" + state.toString() + "\",\n" + + " \"shards_stats\" : {\n" + + " \"initializing\" : " + initializingShards + ",\n" + + " \"started\" : " + startedShards + ",\n" + + " \"finalizing\" : " + finalizingShards + ",\n" + + " \"done\" : " + doneShards + ",\n" + + " \"failed\" : " + failedShards + ",\n" + + " \"total\" : " + totalShards + "\n" + + " },\n" + + " \"stats\" : {\n" + + " \"number_of_files\" : 0,\n" + + " \"processed_files\" : 0,\n" + + " \"total_size_in_bytes\" : 0,\n" + + " \"processed_size_in_bytes\" : 0,\n" + + " \"start_time_in_millis\" : 0,\n" + + " \"time_in_millis\" : 0\n" + + " },\n" + + " \"indices\" : {\n" + + " \"" + indexName + "\" : {\n" + + " \"shards_stats\" : {\n" + + " \"initializing\" : " + initializingShards + ",\n" + + " \"started\" : " + startedShards + ",\n" + + " \"finalizing\" : " + finalizingShards + ",\n" + + " \"done\" : " + doneShards + ",\n" + + " \"failed\" : " + failedShards + ",\n" + + " \"total\" : " + totalShards + "\n" + + " },\n" + + " \"stats\" : {\n" + + " \"number_of_files\" : 0,\n" + + " \"processed_files\" : 0,\n" + + " \"total_size_in_bytes\" : 0,\n" + + " \"processed_size_in_bytes\" : 0,\n" + + " \"start_time_in_millis\" : 0,\n" + + " \"time_in_millis\" : 0\n" + + " },\n" + + " \"shards\" : {\n" + + " \"" + shardId + "\" : {\n" + + " \"stage\" : \"" + shardStage.toString() + "\",\n" + + " \"stats\" : {\n" + + " \"number_of_files\" : 0,\n" + + " \"processed_files\" : 0,\n" + + " \"total_size_in_bytes\" : 0,\n" + + " \"processed_size_in_bytes\" : 0,\n" + + " \"start_time_in_millis\" : 0,\n" + + " \"time_in_millis\" : 0\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + assertEquals(expected, status.toString()); + } 
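The test above builds its ShardId by round-tripping through the "[index][shardId]" string form. A small sketch of that format (the demo class is invented; ShardId.fromString and the accessors are the APIs the test itself uses):

    import org.elasticsearch.index.shard.ShardId;

    public class ShardIdFormatDemo {
        public static void main(String[] args) {
            // fromString expects the same "[indexName][shardId]" rendering
            // that ShardId.toString() produces.
            ShardId shardId = ShardId.fromString("[my-index][3]");
            System.out.println(shardId.getIndexName()); // my-index
            System.out.println(shardId.getId());        // 3
        }
    }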
+} diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java index 137d7593edca9..bcc70773146c6 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.monitor.jvm.JvmInfo; import java.nio.file.Path; @@ -150,9 +151,9 @@ public void testElasticsearchSettings() throws Exception { true, output -> {}, (foreground, pidFile, quiet, env) -> { - Map settings = env.settings().getAsMap(); - assertThat(settings, hasEntry("foo", "bar")); - assertThat(settings, hasEntry("baz", "qux")); + Settings settings = env.settings(); + assertEquals("bar", settings.get("foo")); + assertEquals("qux", settings.get("baz")); }, "-Efoo=bar", "-E", "baz=qux"); } diff --git a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java index 43c33f3b95b09..8c1b22f7fb171 100644 --- a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java +++ b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java @@ -43,6 +43,8 @@ import java.util.HashMap; import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -144,7 +146,10 @@ protected static void assertHeaders(Map headers, Map headers = new HashMap<>(); + Settings asSettings = HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX); + assertHeaders(pool.getThreadContext().getHeaders(), + asSettings.keySet().stream().collect(Collectors.toMap(Function.identity(), k -> asSettings.get(k)))); } public static class InternalException extends Exception { @@ -161,9 +166,11 @@ protected static class AssertingActionListener implements ActionListener { private final String action; private final Map expectedHeaders; private final ThreadPool pool; + private static final Settings THREAD_HEADER_SETTINGS = HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX); public AssertingActionListener(String action, ThreadPool pool) { - this(action, HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX).getAsMap(), pool); + this(action, THREAD_HEADER_SETTINGS.keySet().stream() + .collect(Collectors.toMap(Function.identity(), k -> THREAD_HEADER_SETTINGS.get(k))), pool); } public AssertingActionListener(String action, Map expectedHeaders, ThreadPool pool) { diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java index 9dc989961f32b..4dd757c140311 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java @@ -278,7 +278,7 @@ public void testShrinkIndexIgnoresTemplates() throws Exception { assertThat(result.metaData().index("test").getAliases(), not(hasKey("alias1"))); assertThat(result.metaData().index("test").getCustoms(), not(hasKey("custom1"))); - assertThat(result.metaData().index("test").getSettings().getAsMap(), not(Matchers.hasKey("key1"))); + 
assertThat(result.metaData().index("test").getSettings().keySet(), not(Matchers.contains("key1"))); assertThat(getMappingsFromResponse(), not(Matchers.hasKey("mapping1"))); } diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java index b2dba18181022..d6e6d1691a042 100644 --- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.node; import org.elasticsearch.Version; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.ESTestCase; @@ -59,7 +60,7 @@ public void testNameMatch() { Settings settings = Settings.builder() .put("xxx.name", "name1") .build(); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(OR, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("name1", "id1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); @@ -73,7 +74,7 @@ public void testIdMatch() { Settings settings = Settings.builder() .put("xxx._id", "id1") .build(); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(OR, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("name1", "id1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); @@ -88,7 +89,7 @@ public void testIdOrNameMatch() { .put("xxx._id", "id1,blah") .put("xxx.name", "blah,name2") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(OR, "xxx.", settings); final Version version = Version.CURRENT; DiscoveryNode node = new DiscoveryNode("name1", "id1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), version); @@ -106,7 +107,7 @@ public void testTagAndGroupMatch() { .put("xxx.tag", "A") .put("xxx.group", "B") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(AND, "xxx.", settings); Map attributes = new HashMap<>(); attributes.put("tag", "A"); @@ -139,7 +140,7 @@ public void testStarMatch() { Settings settings = Settings.builder() .put("xxx.name", "*") .build(); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(OR, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("name1", "id1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); @@ -151,7 +152,7 @@ public void testIpBindFilteringMatchingAnd() { .put("xxx.tag", "A") .put("xxx." + randomFrom("_ip", "_host_ip", "_publish_ip"), "192.1.1.54") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(AND, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); @@ -162,7 +163,7 @@ public void testIpBindFilteringNotMatching() { .put("xxx.tag", "B") .put("xxx." 
+ randomFrom("_ip", "_host_ip", "_publish_ip"), "192.1.1.54") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(AND, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(false)); @@ -173,7 +174,7 @@ public void testIpBindFilteringNotMatchingAnd() { .put("xxx.tag", "A") .put("xxx." + randomFrom("_ip", "_host_ip", "_publish_ip"), "8.8.8.8") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(AND, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(false)); @@ -184,7 +185,7 @@ public void testIpBindFilteringMatchingOr() { .put("xxx." + randomFrom("_ip", "_host_ip", "_publish_ip"), "192.1.1.54") .put("xxx.tag", "A") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(OR, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); @@ -195,7 +196,7 @@ public void testIpBindFilteringNotMatchingOr() { .put("xxx.tag", "A") .put("xxx." + randomFrom("_ip", "_host_ip", "_publish_ip"), "8.8.8.8") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(OR, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); @@ -206,7 +207,7 @@ public void testIpPublishFilteringMatchingAnd() { .put("xxx.tag", "A") .put("xxx._publish_ip", "192.1.1.54") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(AND, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); @@ -217,7 +218,7 @@ public void testIpPublishFilteringNotMatchingAnd() { .put("xxx.tag", "A") .put("xxx._publish_ip", "8.8.8.8") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(AND, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(false)); @@ -228,7 +229,7 @@ public void testIpPublishFilteringMatchingOr() { .put("xxx._publish_ip", "192.1.1.54") .put("xxx.tag", "A") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(OR, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); @@ -239,7 +240,7 @@ public void testIpPublishFilteringNotMatchingOr() { .put("xxx.tag", "A") .put("xxx._publish_ip", 
"8.8.8.8") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(OR, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); @@ -250,7 +251,7 @@ public void testIpPublishFilteringMatchingWildcard() { Settings settings = shuffleSettings(Settings.builder() .put("xxx._publish_ip", matches ? "192.1.*" : "192.2.*") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(OR, "xxx.", settings); DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, emptyMap(), emptySet(), null); assertThat(filters.match(node), equalTo(matches)); @@ -263,17 +264,22 @@ public void testCommaSeparatedValuesTrimmed() { .put("xxx." + randomFrom("_ip", "_host_ip", "_publish_ip"), "192.1.1.1, 192.1.1.54") .put("xxx.tag", "A, B") .build()); - DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); + DiscoveryNodeFilters filters = buildFromSettings(OR, "xxx.", settings); assertTrue(filters.match(node)); } private Settings shuffleSettings(Settings source) { Settings.Builder settings = Settings.builder(); - List keys = new ArrayList<>(source.getAsMap().keySet()); + List keys = new ArrayList<>(source.keySet()); Collections.shuffle(keys, random()); for (String o : keys) { - settings.put(o, source.getAsMap().get(o)); + settings.put(o, source.get(o)); } return settings.build(); } + + public static DiscoveryNodeFilters buildFromSettings(DiscoveryNodeFilters.OpType opType, String prefix, Settings settings) { + Setting.AffixSetting setting = Setting.prefixKeySetting(prefix, key -> Setting.simpleString(key)); + return DiscoveryNodeFilters.buildFromKeyValue(opType, setting.getAsMap(settings)); + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 04aed6eab3259..7b1ac5116f2dd 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -78,7 +78,7 @@ public void testDeleteIsAppliedFirst() { .get(); assertAcked(response); - assertEquals(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), "1s"); + assertEquals(response.getTransientSettings().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), "1s"); assertTrue(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY)); assertFalse(response.getTransientSettings().getAsBoolean(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), null)); @@ -86,7 +86,7 @@ public void testDeleteIsAppliedFirst() { .prepareUpdateSettings() .setTransientSettings(Settings.builder().putNull((randomBoolean() ? 
"discovery.zen.*" : "*")).put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "2s")) .get(); - assertEquals(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), "2s"); + assertEquals(response.getTransientSettings().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), "2s"); assertNull(response.getTransientSettings().getAsBoolean(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), null)); } @@ -102,7 +102,7 @@ public void testResetClusterSetting() { .get(); assertAcked(response); - assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(response.getTransientSettings().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L)); assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); @@ -113,7 +113,7 @@ public void testResetClusterSetting() { .get(); assertAcked(response); - assertNull(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertNull(response.getTransientSettings().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); @@ -125,7 +125,7 @@ public void testResetClusterSetting() { .get(); assertAcked(response); - assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(response.getTransientSettings().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L)); assertFalse(discoverySettings.getPublishDiff()); response = client().admin().cluster() @@ -133,8 +133,8 @@ public void testResetClusterSetting() { .setTransientSettings(Settings.builder().putNull((randomBoolean() ? 
"discovery.zen.*" : "*"))) .get(); - assertNull(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); - assertNull(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey())); + assertNull(response.getTransientSettings().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertNull(response.getTransientSettings().get(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey())); assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); @@ -145,7 +145,7 @@ public void testResetClusterSetting() { .get(); assertAcked(response); - assertThat(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(response.getPersistentSettings().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L)); assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); @@ -156,7 +156,7 @@ public void testResetClusterSetting() { .get(); assertAcked(response); - assertNull(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertNull(response.getPersistentSettings().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); @@ -169,7 +169,7 @@ public void testResetClusterSetting() { .get(); assertAcked(response); - assertThat(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(response.getPersistentSettings().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L)); assertFalse(discoverySettings.getPublishDiff()); response = client().admin().cluster() @@ -177,8 +177,8 @@ public void testResetClusterSetting() { .setPersistentSettings(Settings.builder().putNull((randomBoolean() ? 
"discovery.zen.*" : "*"))) .get(); - assertNull(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); - assertNull(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey())); + assertNull(response.getPersistentSettings().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey())); + assertNull(response.getPersistentSettings().get(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey())); assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.get(Settings.EMPTY))); assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY))); } @@ -261,7 +261,7 @@ public void testUpdateDiscoveryPublishTimeout() { .get(); assertAcked(response); - assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); + assertThat(response.getTransientSettings().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s")); assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L)); try { diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 1c64bcb27242f..24f9550a78de6 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -468,21 +468,21 @@ public void testDiff() throws IOException { ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(fooBar, fooBarBaz, foorBarQuux, someGroup, someAffix))); Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY); - assertEquals(4, diff.size()); // 4 since foo.bar.quux has 3 values essentially + assertEquals(2, diff.size()); assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1)); assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"}); diff = settings.diff( Settings.builder().put("foo.bar", 5).build(), Settings.builder().put("foo.bar.baz", 17).putArray("foo.bar.quux", "d", "e", "f").build()); - assertEquals(4, diff.size()); // 4 since foo.bar.quux has 3 values essentially + assertEquals(2, diff.size()); assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(17)); assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"d", "e", "f"}); diff = settings.diff( Settings.builder().put("some.group.foo", 5).build(), Settings.builder().put("some.group.foobar", 17).put("some.group.foo", 25).build()); - assertEquals(6, diff.size()); // 6 since foo.bar.quux has 3 values essentially + assertEquals(4, diff.size()); assertThat(diff.getAsInt("some.group.foobar", null), equalTo(17)); assertNull(diff.get("some.group.foo")); assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"}); @@ -492,7 +492,7 @@ public void testDiff() throws IOException { diff = settings.diff( Settings.builder().put("some.prefix.foo.somekey", 5).build(), Settings.builder().put("some.prefix.foobar.somekey", 17).put("some.prefix.foo.somekey", 18).build()); - assertEquals(6, diff.size()); // 6 since foo.bar.quux has 3 values essentially + assertEquals(4, diff.size()); assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17)); assertNull(diff.get("some.prefix.foo.somekey")); assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] 
{"a", "b", "c"}); @@ -518,7 +518,7 @@ public void testDiffWithAffixAndComplexMatcher() { diff = settings.diff( Settings.builder().put("foo.bar", 5).build(), Settings.builder().put("foo.bar.baz", 17).putArray("foo.bar.quux", "d", "e", "f").build()); - assertEquals(4, diff.size()); + assertEquals(2, diff.size()); assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(17)); assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"d", "e", "f"}); @@ -548,7 +548,7 @@ public void testDiffWithAffixAndComplexMatcher() { .putArray("foo.bar.quux", "x", "y", "z") .putArray("foo.baz.quux", "d", "e", "f") .build()); - assertEquals(9, diff.size()); + assertEquals(5, diff.size()); assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17)); assertNull(diff.get("some.prefix.foo.somekey")); assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"x", "y", "z"}); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 2010c6cacc404..4dfedf519bd16 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -337,7 +337,7 @@ public void testGroups() { Settings.EMPTY); fail("not accepted"); } catch (IllegalArgumentException ex) { - assertEquals(ex.getMessage(), "illegal value can't update [foo.bar.] from [{}] to [{1.value=1, 2.value=2}]"); + assertEquals(ex.getMessage(), "illegal value can't update [foo.bar.] from [{}] to [{\"1.value\":\"1\",\"2.value\":\"2\"}]"); } } @@ -514,11 +514,11 @@ public void testListSettingAcceptsNumberSyntax() { List input = Arrays.asList("test", "test1, test2", "test", ",,,,"); Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0])); // try to parse this really annoying format - for (String key : builder.internalMap().keySet()) { + for (String key : builder.keys()) { assertTrue("key: " + key + " doesn't match", listSetting.match(key)); } builder = Settings.builder().put("foo.bar", "1,2,3"); - for (String key : builder.internalMap().keySet()) { + for (String key : builder.keys()) { assertTrue("key: " + key + " doesn't match", listSetting.match(key)); } assertFalse(listSetting.match("foo_bar")); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java index 404f3a4cb10c7..9e6d4be7095f0 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java @@ -108,7 +108,7 @@ private void testFiltering(Settings source, Settings filtered, String... pattern // Test using direct filtering Settings filteredSettings = settingsFilter.filter(source); - assertThat(filteredSettings.getAsMap().entrySet(), equalTo(filtered.getAsMap().entrySet())); + assertThat(filteredSettings, equalTo(filtered)); // Test using toXContent filtering RestRequest request = new FakeRestRequest(); @@ -119,6 +119,6 @@ private void testFiltering(Settings source, Settings filtered, String... 
pattern xContentBuilder.endObject(); String filteredSettingsString = xContentBuilder.string(); filteredSettings = Settings.builder().loadFromSource(filteredSettingsString, xContentBuilder.contentType()).build(); - assertThat(filteredSettings.getAsMap().entrySet(), equalTo(filtered.getAsMap().entrySet())); + assertThat(filteredSettings, equalTo(filtered)); } } diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java index a1c2711e5acc2..6a2be8217a661 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java @@ -113,8 +113,8 @@ public void testRegisterSettingsFilter() { Setting.boolSetting("bar.baz", true, Property.NodeScope)), Arrays.asList("foo.*")); assertInstanceBinding(module, Settings.class, (s) -> s == settings); assertInstanceBinding(module, SettingsFilter.class, (s) -> s.filter(settings).size() == 1); - assertInstanceBinding(module, SettingsFilter.class, (s) -> s.filter(settings).getAsMap().containsKey("bar.baz")); - assertInstanceBinding(module, SettingsFilter.class, (s) -> s.filter(settings).getAsMap().get("bar.baz").equals("false")); + assertInstanceBinding(module, SettingsFilter.class, (s) -> s.filter(settings).keySet().contains("bar.baz")); + assertInstanceBinding(module, SettingsFilter.class, (s) -> s.filter(settings).get("bar.baz").equals("false")); } diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index 44ffdd0b59874..4a9e55c324653 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.common.settings; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ToXContent; @@ -28,14 +30,18 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.hamcrest.CoreMatchers; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.io.StringBufferInputStream; +import java.io.StringReader; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; +import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; @@ -255,7 +261,7 @@ public void testThatArraysAreOverriddenCorrectly() throws IOException { .put(Settings.builder().put("value.data", "1").build()) .build(); assertThat(settings.get("value.data"), is("1")); - assertThat(settings.get("value"), is(nullValue())); + assertThat(settings.get("value"), is("[4, 5]")); } public void testPrefixNormalization() { @@ -303,34 +309,27 @@ public void testFilteredMap() { builder.put("a.b.c.d", "ab3"); - Map fiteredMap = builder.build().filter((k) -> k.startsWith("a.b")).getAsMap(); - assertEquals(3, fiteredMap.size()); + Settings filteredSettings = builder.build().filter((k) -> k.startsWith("a.b")); + assertEquals(3, 
filteredSettings.size()); int numKeys = 0; - for (String k : fiteredMap.keySet()) { + for (String k : filteredSettings.keySet()) { numKeys++; assertTrue(k.startsWith("a.b")); } assertEquals(3, numKeys); - int numValues = 0; - - for (String v : fiteredMap.values()) { - numValues++; - assertTrue(v.startsWith("ab")); - } - assertEquals(3, numValues); - assertFalse(fiteredMap.containsKey("a.c")); - assertFalse(fiteredMap.containsKey("a")); - assertTrue(fiteredMap.containsKey("a.b")); - assertTrue(fiteredMap.containsKey("a.b.c")); - assertTrue(fiteredMap.containsKey("a.b.c.d")); + assertFalse(filteredSettings.keySet().contains("a.c")); + assertFalse(filteredSettings.keySet().contains("a")); + assertTrue(filteredSettings.keySet().contains("a.b")); + assertTrue(filteredSettings.keySet().contains("a.b.c")); + assertTrue(filteredSettings.keySet().contains("a.b.c.d")); expectThrows(UnsupportedOperationException.class, () -> - fiteredMap.remove("a.b")); - assertEquals("ab1", fiteredMap.get("a.b")); - assertEquals("ab2", fiteredMap.get("a.b.c")); - assertEquals("ab3", fiteredMap.get("a.b.c.d")); + filteredSettings.keySet().remove("a.b")); + assertEquals("ab1", filteredSettings.get("a.b")); + assertEquals("ab2", filteredSettings.get("a.b.c")); + assertEquals("ab3", filteredSettings.get("a.b.c.d")); - Iterator iterator = fiteredMap.keySet().iterator(); + Iterator iterator = filteredSettings.keySet().iterator(); for (int i = 0; i < 10; i++) { assertTrue(iterator.hasNext()); } @@ -356,7 +355,7 @@ public void testPrefixMap() { builder.put("a.c", "ac1"); builder.put("a.b.c.d", "ab3"); - Map prefixMap = builder.build().getByPrefix("a.").getAsMap(); + Settings prefixMap = builder.build().getByPrefix("a."); assertEquals(4, prefixMap.size()); int numKeys = 0; for (String k : prefixMap.keySet()) { @@ -365,20 +364,14 @@ public void testPrefixMap() { } assertEquals(4, numKeys); - int numValues = 0; - for (String v : prefixMap.values()) { - numValues++; - assertTrue(v, v.startsWith("ab") || v.startsWith("ac")); - } - assertEquals(4, numValues); - assertFalse(prefixMap.containsKey("a")); - assertTrue(prefixMap.containsKey("c")); - assertTrue(prefixMap.containsKey("b")); - assertTrue(prefixMap.containsKey("b.c")); - assertTrue(prefixMap.containsKey("b.c.d")); + assertFalse(prefixMap.keySet().contains("a")); + assertTrue(prefixMap.keySet().contains("c")); + assertTrue(prefixMap.keySet().contains("b")); + assertTrue(prefixMap.keySet().contains("b.c")); + assertTrue(prefixMap.keySet().contains("b.c.d")); expectThrows(UnsupportedOperationException.class, () -> - prefixMap.remove("a.b")); + prefixMap.keySet().remove("a.b")); assertEquals("ab1", prefixMap.get("b")); assertEquals("ab2", prefixMap.get("b.c")); assertEquals("ab3", prefixMap.get("b.c.d")); @@ -444,27 +437,24 @@ public void testEmptyFilterMap() { builder.put("a.c", "ac1"); builder.put("a.b.c.d", "ab3"); - Map fiteredMap = builder.build().filter((k) -> false).getAsMap(); - assertEquals(0, fiteredMap.size()); - for (String k : fiteredMap.keySet()) { + Settings filteredSettings = builder.build().filter((k) -> false); + assertEquals(0, filteredSettings.size()); + for (String k : filteredSettings.keySet()) { fail("no element"); } - for (String v : fiteredMap.values()) { - fail("no element"); - } - assertFalse(fiteredMap.containsKey("a.c")); - assertFalse(fiteredMap.containsKey("a")); - assertFalse(fiteredMap.containsKey("a.b")); - assertFalse(fiteredMap.containsKey("a.b.c")); - assertFalse(fiteredMap.containsKey("a.b.c.d")); + 
assertFalse(filteredSettings.keySet().contains("a.c")); + assertFalse(filteredSettings.keySet().contains("a")); + assertFalse(filteredSettings.keySet().contains("a.b")); + assertFalse(filteredSettings.keySet().contains("a.b.c")); + assertFalse(filteredSettings.keySet().contains("a.b.c.d")); expectThrows(UnsupportedOperationException.class, () -> - fiteredMap.remove("a.b")); - assertNull(fiteredMap.get("a.b")); - assertNull(fiteredMap.get("a.b.c")); - assertNull(fiteredMap.get("a.b.c.d")); + filteredSettings.keySet().remove("a.b")); + assertNull(filteredSettings.get("a.b")); + assertNull(filteredSettings.get("a.b.c")); + assertNull(filteredSettings.get("a.b.c.d")); - Iterator iterator = fiteredMap.keySet().iterator(); + Iterator iterator = filteredSettings.keySet().iterator(); for (int i = 0; i < 10; i++) { assertFalse(iterator.hasNext()); } @@ -486,13 +476,18 @@ public void testWriteSettingsToStream() throws IOException { secureSettings.setString("test.key2.bog", "somethingsecure"); Settings.Builder builder = Settings.builder(); builder.put("test.key1.baz", "blah1"); + builder.putNull("test.key3.bar"); + builder.putArray("test.key4.foo", "1", "2"); builder.setSecureSettings(secureSettings); - assertEquals(5, builder.build().size()); + assertEquals(7, builder.build().size()); Settings.writeSettingsToStream(builder.build(), out); StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes); Settings settings = Settings.readSettingsFromStream(in); - assertEquals(1, settings.size()); + assertEquals(3, settings.size()); assertEquals("blah1", settings.get("test.key1.baz")); + assertNull(settings.get("test.key3.bar")); + assertTrue(settings.keySet().contains("test.key3.bar")); + assertArrayEquals(new String[] {"1", "2"}, settings.getAsArray("test.key4.foo")); } public void testSecureSettingConflict() { @@ -503,14 +498,12 @@ public void testSecureSettingConflict() { } public void testGetAsArrayFailsOnDuplicates() { - final Settings settings = - Settings.builder() - .put("foobar.0", "bar") - .put("foobar.1", "baz") - .put("foobar", "foo") - .build(); - final IllegalStateException e = expectThrows(IllegalStateException.class, () -> settings.getAsArray("foobar")); - assertThat(e, hasToString(containsString("settings object contains values for [foobar=foo] and [foobar.0=bar]"))); + final IllegalStateException e = expectThrows(IllegalStateException.class, () -> Settings.builder() + .put("foobar.0", "bar") + .put("foobar.1", "baz") + .put("foobar", "foo") + .build()); + assertThat(e, hasToString(containsString("settings builder can't contain values for [foobar=foo] and [foobar.0=bar]"))); } public void testToAndFromXContent() throws IOException { @@ -528,7 +521,7 @@ public void testToAndFromXContent() throws IOException { builder.endObject(); XContentParser parser = createParser(builder); Settings build = Settings.fromXContent(parser); - assertEquals(7, build.size()); // each list element is it's own key hence 7 and not 5 + assertEquals(5, build.size()); assertArrayEquals(new String[] {"1", "2", "3"}, build.getAsArray("foo.bar.baz")); assertEquals(2, build.getAsInt("foo.foobar", 0).intValue()); assertEquals("test", build.get("rootfoo")); @@ -547,8 +540,8 @@ public void testSimpleJsonSettings() throws Exception { assertThat(settings.getAsInt("test1.test2.value3", -1), equalTo(2)); // check array - assertThat(settings.get("test1.test3.0"), equalTo("test3-1")); - assertThat(settings.get("test1.test3.1"), equalTo("test3-2")); + assertNull(settings.get("test1.test3.0")); + 
assertNull(settings.get("test1.test3.1")); assertThat(settings.getAsArray("test1.test3").length, equalTo(2)); assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1")); assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2")); @@ -587,7 +580,7 @@ public void testToXContent() throws IOException { builder.startObject(); test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap())); builder.endObject(); - assertEquals("{\"foo\":{\"bar\":{\"0\":\"1\",\"1\":\"2\",\"2\":\"3\",\"baz\":\"test\"}}}", builder.string()); + assertEquals("{\"foo\":{\"bar.baz\":\"test\",\"bar\":[\"1\",\"2\",\"3\"]}}", builder.string()); test = Settings.builder().putArray("foo.bar", "1", "2", "3").build(); builder = XContentBuilder.builder(XContentType.JSON.xContent()); @@ -600,7 +593,7 @@ public void testToXContent() throws IOException { builder.startObject(); test.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true"))); builder.endObject(); - assertEquals("{\"foo.bar.0\":\"1\",\"foo.bar.1\":\"2\",\"foo.bar.2\":\"3\"}", builder.string()); + assertEquals("{\"foo.bar\":[\"1\",\"2\",\"3\"]}", builder.string()); } public void testLoadEmptyStream() throws IOException { @@ -620,13 +613,22 @@ public void testSimpleYamlSettings() throws Exception { assertThat(settings.getAsInt("test1.test2.value3", -1), equalTo(2)); // check array - assertThat(settings.get("test1.test3.0"), equalTo("test3-1")); - assertThat(settings.get("test1.test3.1"), equalTo("test3-2")); + assertNull(settings.get("test1.test3.0")); + assertNull(settings.get("test1.test3.1")); assertThat(settings.getAsArray("test1.test3").length, equalTo(2)); assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1")); assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2")); } + public void testYamlLegacyList() throws IOException { + Settings settings = Settings.builder() + .loadFromStream("foo.yml", new ByteArrayInputStream("foo.bar.baz.0: 1\nfoo.bar.baz.1: 2".getBytes(StandardCharsets.UTF_8)), + false).build(); + assertThat(settings.getAsArray("foo.bar.baz").length, equalTo(2)); + assertThat(settings.getAsArray("foo.bar.baz")[0], equalTo("1")); + assertThat(settings.getAsArray("foo.bar.baz")[1], equalTo("2")); + } + public void testIndentation() throws Exception { String yaml = "/org/elasticsearch/common/settings/loader/indentation-settings.yml"; ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> { @@ -654,4 +656,78 @@ public void testMissingValue() throws Exception { e.getMessage(), e.getMessage().contains("null-valued setting found for key [foo] found at line number [1], column number [5]")); } + + public void testReadLegacyFromStream() throws IOException { + BytesStreamOutput output = new BytesStreamOutput(); + output.setVersion(VersionUtils.getPreviousVersion(Version.V_6_1_0)); + output.writeVInt(5); + output.writeString("foo.bar.1"); + output.writeOptionalString("1"); + output.writeString("foo.bar.0"); + output.writeOptionalString("0"); + output.writeString("foo.bar.2"); + output.writeOptionalString("2"); + output.writeString("foo.bar.3"); + output.writeOptionalString("3"); + output.writeString("foo.bar.baz"); + output.writeOptionalString("baz"); + StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes())); + in.setVersion(VersionUtils.getPreviousVersion(Version.V_6_1_0)); + Settings settings = Settings.readSettingsFromStream(in); + assertEquals(2, settings.size()); + assertArrayEquals(new String[]{"0", "1", "2", 
"3"}, settings.getAsArray("foo.bar")); + assertEquals("baz", settings.get("foo.bar.baz")); + } + + public void testWriteLegacyOutput() throws IOException { + BytesStreamOutput output = new BytesStreamOutput(); + output.setVersion(VersionUtils.getPreviousVersion(Version.V_6_1_0)); + Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3") + .put("foo.bar.baz", "baz").putNull("foo.null").build(); + Settings.writeSettingsToStream(settings, output); + StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes())); + assertEquals(6, in.readVInt()); + Map keyValues = new HashMap<>(); + for (int i = 0; i < 6; i++){ + keyValues.put(in.readString(), in.readOptionalString()); + } + assertEquals(keyValues.get("foo.bar.0"), "0"); + assertEquals(keyValues.get("foo.bar.1"), "1"); + assertEquals(keyValues.get("foo.bar.2"), "2"); + assertEquals(keyValues.get("foo.bar.3"), "3"); + assertEquals(keyValues.get("foo.bar.baz"), "baz"); + assertTrue(keyValues.containsKey("foo.null")); + assertNull(keyValues.get("foo.null")); + + in = StreamInput.wrap(BytesReference.toBytes(output.bytes())); + in.setVersion(output.getVersion()); + Settings readSettings = Settings.readSettingsFromStream(in); + assertEquals(3, readSettings.size()); + assertArrayEquals(new String[] {"0", "1", "2", "3"}, readSettings.getAsArray("foo.bar")); + assertEquals(readSettings.get("foo.bar.baz"), "baz"); + assertTrue(readSettings.keySet().contains("foo.null")); + assertNull(readSettings.get("foo.null")); + } + + public void testReadWriteArray() throws IOException { + BytesStreamOutput output = new BytesStreamOutput(); + output.setVersion(randomFrom(Version.CURRENT, Version.V_6_1_0)); + Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").build(); + Settings.writeSettingsToStream(settings, output); + StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes())); + Settings build = Settings.readSettingsFromStream(in); + assertEquals(2, build.size()); + assertArrayEquals(build.getAsArray("foo.bar"), new String[] {"0", "1", "2", "3"}); + assertEquals(build.get("foo.bar.baz"), "baz"); + } + + public void testCopy() { + Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").putNull("test").build(); + assertArrayEquals(new String[] {"0", "1", "2", "3"}, Settings.builder().copy("foo.bar", settings).build().getAsArray("foo.bar")); + assertEquals("baz", Settings.builder().copy("foo.bar.baz", settings).build().get("foo.bar.baz")); + assertNull(Settings.builder().copy("foo.bar.baz", settings).build().get("test")); + assertTrue(Settings.builder().copy("test", settings).build().keySet().contains("test")); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> Settings.builder().copy("not_there", settings)); + assertEquals("source key not found in the source settings", iae.getMessage()); + } } diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 975c67dcfbfe0..1caf0fa135e60 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -191,7 +191,7 @@ public void beforeIndexRemoved(IndexService indexService, IndexRemovalReason rea module.addIndexEventListener(eventListener); IndexService indexService = newIndexService(module); IndexSettings x = indexService.getIndexSettings(); - 
assertEquals(x.getSettings().getAsMap(), indexSettings.getSettings().getAsMap()); + assertEquals(x.getSettings(), indexSettings.getSettings()); assertEquals(x.getIndex(), index); indexService.getIndexEventListener().beforeIndexRemoved(null, null); assertTrue(atomicBoolean.get()); diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 8e43d2433e928..8d8878fa28251 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -60,7 +60,7 @@ public void testRunListener() { assertEquals("0xdeadbeef", settings.getUUID()); assertFalse(settings.updateIndexMetaData(metaData)); - assertEquals(metaData.getSettings().getAsMap(), settings.getSettings().getAsMap()); + assertEquals(metaData.getSettings(), settings.getSettings()); assertEquals(0, integer.get()); assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(theSettings).put("index.test.setting.int", 42) .build()))); @@ -83,7 +83,7 @@ public void testSettingsUpdateValidator() { assertEquals("0xdeadbeef", settings.getUUID()); assertFalse(settings.updateIndexMetaData(metaData)); - assertEquals(metaData.getSettings().getAsMap(), settings.getSettings().getAsMap()); + assertEquals(metaData.getSettings(), settings.getSettings()); assertEquals(0, integer.get()); expectThrows(IllegalArgumentException.class, () -> settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(theSettings).put("index.test.setting.int", 42).build()))); @@ -156,7 +156,7 @@ public void testSettingsConsistency() { } catch (IllegalArgumentException ex) { assertEquals("uuid mismatch on settings update expected: 0xdeadbeef but was: _na_", ex.getMessage()); } - assertEquals(metaData.getSettings().getAsMap(), settings.getSettings().getAsMap()); + assertEquals(metaData.getSettings(), settings.getSettings()); } public IndexSettings newIndexSettings(IndexMetaData metaData, Settings nodeSettings, Setting... 
settings) { diff --git a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index a4f459d0e55a6..a0afe28a17bce 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; +import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.MultiMatchQueryBuilder.Type; import org.elasticsearch.index.search.MatchQuery; import org.elasticsearch.search.internal.SearchContext; @@ -123,6 +124,9 @@ protected MultiMatchQueryBuilder doCreateTestQueryBuilder() { if (randomBoolean()) { query.autoGenerateSynonymsPhraseQuery(randomBoolean()); } + if (randomBoolean()) { + query.fuzzyTranspositions(randomBoolean()); + } // test with fields with boost and patterns delegated to the tests further below return query; } @@ -241,6 +245,7 @@ public void testFromJson() throws IOException { " \"lenient\" : false,\n" + " \"zero_terms_query\" : \"NONE\",\n" + " \"auto_generate_synonyms_phrase_query\" : true,\n" + + " \"fuzzy_transpositions\" : false,\n" + " \"boost\" : 1.0\n" + " }\n" + "}"; @@ -252,6 +257,7 @@ public void testFromJson() throws IOException { assertEquals(json, 3, parsed.fields().size()); assertEquals(json, MultiMatchQueryBuilder.Type.MOST_FIELDS, parsed.type()); assertEquals(json, Operator.OR, parsed.operator()); + assertEquals(json, false, parsed.fuzzyTranspositions()); } /** @@ -317,4 +323,19 @@ public void testFuzzinessOnNonStringField() throws Exception { query.analyzer(null); query.toQuery(context); // no exception } + + public void testToFuzzyQuery() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + + MultiMatchQueryBuilder qb = new MultiMatchQueryBuilder("text").field(STRING_FIELD_NAME); + qb.fuzziness(Fuzziness.TWO); + qb.prefixLength(2); + qb.maxExpansions(5); + qb.fuzzyTranspositions(false); + + Query query = qb.toQuery(createShardContext()); + FuzzyQuery expected = new FuzzyQuery(new Term(STRING_FIELD_NAME, "text"), 2, 2, 5, false); + + assertEquals(expected, query); + } } diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 9609c8415a789..c29172d88afa8 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -162,6 +162,9 @@ protected QueryStringQueryBuilder doCreateTestQueryBuilder() { if (randomBoolean()) { queryStringQueryBuilder.autoGenerateSynonymsPhraseQuery(randomBoolean()); } + if (randomBoolean()) { + queryStringQueryBuilder.fuzzyTranspositions(randomBoolean()); + } queryStringQueryBuilder.type(randomFrom(MultiMatchQueryBuilder.Type.values())); return queryStringQueryBuilder; } @@ -864,6 +867,7 @@ public void testFromJson() throws IOException { " \"phrase_slop\" : 0,\n" + " \"escape\" : false,\n" + " \"auto_generate_synonyms_phrase_query\" : true,\n" + + " \"fuzzy_transpositions\" : false,\n" + " \"boost\" : 1.0\n" + " }\n" + "}"; @@ -873,6 +877,7 @@ public void testFromJson() throws IOException { assertEquals(json, "this 
AND that OR thus", parsed.queryString()); assertEquals(json, "content", parsed.defaultField()); + assertEquals(json, false, parsed.fuzzyTranspositions()); } public void testExpandedTerms() throws Exception { @@ -1029,6 +1034,19 @@ public void testQuoteAnalyzer() throws Exception { assertEquals(expectedQuery, query); } + public void testToFuzzyQuery() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + + Query query = new QueryStringQueryBuilder("text~2") + .field(STRING_FIELD_NAME) + .fuzzyPrefixLength(2) + .fuzzyMaxExpansions(5) + .fuzzyTranspositions(false) + .toQuery(createShardContext()); + FuzzyQuery expected = new FuzzyQuery(new Term(STRING_FIELD_NAME, "text"), 2, 2, 5, false); + assertEquals(expected, query); + } + private static IndexMetaData newIndexMeta(String name, Settings oldIndexSettings, Settings indexSettings) { Settings build = Settings.builder().put(oldIndexSettings) .put(indexSettings) diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index 76f1bed587024..efacd3c1faba4 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -105,6 +105,15 @@ protected SimpleQueryStringBuilder doCreateTestQueryBuilder() { if (randomBoolean()) { result.autoGenerateSynonymsPhraseQuery(randomBoolean()); } + if (randomBoolean()) { + result.fuzzyPrefixLength(randomIntBetween(0, 5)); + } + if (randomBoolean()) { + result.fuzzyMaxExpansions(randomIntBetween(1, 5)); + } + if (randomBoolean()) { + result.fuzzyTranspositions(randomBoolean()); + } return result; } @@ -126,6 +135,18 @@ public void testDefaults() { assertEquals("Wrong default default lenient.", false, qb.lenient()); assertEquals("Wrong default default lenient field.", false, SimpleQueryStringBuilder.DEFAULT_LENIENT); + + assertEquals("Wrong default default fuzzy prefix length.", FuzzyQuery.defaultPrefixLength, qb.fuzzyPrefixLength()); + assertEquals("Wrong default default fuzzy prefix length field.", + FuzzyQuery.defaultPrefixLength, SimpleQueryStringBuilder.DEFAULT_FUZZY_PREFIX_LENGTH); + + assertEquals("Wrong default default fuzzy max expansions.", FuzzyQuery.defaultMaxExpansions, qb.fuzzyMaxExpansions()); + assertEquals("Wrong default default fuzzy max expansions field.", + FuzzyQuery.defaultMaxExpansions, SimpleQueryStringBuilder.DEFAULT_FUZZY_MAX_EXPANSIONS); + + assertEquals("Wrong default default fuzzy transpositions.", FuzzyQuery.defaultTranspositions, qb.fuzzyTranspositions()); + assertEquals("Wrong default default fuzzy transpositions field.", + FuzzyQuery.defaultTranspositions, SimpleQueryStringBuilder.DEFAULT_FUZZY_TRANSPOSITIONS); } public void testDefaultNullComplainFlags() { @@ -336,6 +357,9 @@ public void testFromJson() throws IOException { " \"analyze_wildcard\" : false,\n" + " \"quote_field_suffix\" : \".quote\",\n" + " \"auto_generate_synonyms_phrase_query\" : true,\n" + + " \"fuzzy_prefix_length\" : 1,\n" + + " \"fuzzy_max_expansions\" : 5,\n" + + " \"fuzzy_transpositions\" : false,\n" + " \"boost\" : 1.0\n" + " }\n" + "}"; @@ -347,6 +371,9 @@ public void testFromJson() throws IOException { assertEquals(json, 1, parsed.fields().size()); assertEquals(json, "snowball", parsed.analyzer()); assertEquals(json, ".quote", parsed.quoteFieldSuffix()); + assertEquals(json, 1, 
parsed.fuzzyPrefixLength()); + assertEquals(json, 5, parsed.fuzzyMaxExpansions()); + assertEquals(json, false, parsed.fuzzyTranspositions()); } public void testMinimumShouldMatch() throws IOException { @@ -567,6 +594,19 @@ public void testDefaultField() throws Exception { ); } + public void testToFuzzyQuery() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + + Query query = new SimpleQueryStringBuilder("text~2") + .field(STRING_FIELD_NAME) + .fuzzyPrefixLength(2) + .fuzzyMaxExpansions(5) + .fuzzyTranspositions(false) + .toQuery(createShardContext()); + FuzzyQuery expected = new FuzzyQuery(new Term(STRING_FIELD_NAME, "text"), 2, 2, 5, false); + assertEquals(expected, query); + } + private static IndexMetaData newIndexMeta(String name, Settings oldIndexSettings, Settings indexSettings) { Settings build = Settings.builder().put(oldIndexSettings) .put(indexSettings) diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 6dd9126a51983..ed38ec8b05b96 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -123,7 +123,7 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc .put(EXCEPTION_TOP_LEVEL_RATIO_KEY, topLevelRate) .put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate) .put(MockEngineSupport.WRAP_READER_RATIO.getKey(), 1.0d); - logger.info("creating index: [test] using settings: [{}]", settings.build().getAsMap()); + logger.info("creating index: [test] using settings: [{}]", settings.build()); CreateIndexResponse response = client().admin().indices().prepareCreate("test") .setSettings(settings) .addMapping("type", mapping, XContentType.JSON).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 99e2e7b9aa1d3..e81538b9057ba 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -376,7 +376,7 @@ public void testInvalidSettings() throws Exception { createIndex("test"); GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); - assertNull(getSettingsResponse.getIndexToSettings().get("test").getAsMap().get("index.does_not_exist")); + assertNull(getSettingsResponse.getIndexToSettings().get("test").get("index.does_not_exist")); } public void testIndexTemplateWithAliases() throws Exception { @@ -852,6 +852,6 @@ public void testPartitionedTemplate() throws Exception { .get(); GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test_good").get(); - assertEquals("6", getSettingsResponse.getIndexToSettings().get("test_good").getAsMap().get("index.routing_partition_size")); + assertEquals("6", getSettingsResponse.getIndexToSettings().get("test_good").get("index.routing_partition_size")); } } diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index 71305c41f56d7..21e9c488d9966 100644 --- 
a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.util.Constants; import org.elasticsearch.test.ESTestCase; +import java.math.BigInteger; import java.util.Arrays; import java.util.List; @@ -117,6 +118,12 @@ public void testOsStats() { assertThat(stats.getCgroup().getCpuStat().getNumberOfElapsedPeriods(), greaterThanOrEqualTo(0L)); assertThat(stats.getCgroup().getCpuStat().getNumberOfTimesThrottled(), greaterThanOrEqualTo(0L)); assertThat(stats.getCgroup().getCpuStat().getTimeThrottledNanos(), greaterThanOrEqualTo(0L)); + // These could be null if transported from a node running an older version, but shouldn't be null on the current node + assertThat(stats.getCgroup().getMemoryControlGroup(), notNullValue()); + assertThat(stats.getCgroup().getMemoryLimitInBytes(), notNullValue()); + assertThat(new BigInteger(stats.getCgroup().getMemoryLimitInBytes()), greaterThan(BigInteger.ZERO)); + assertThat(stats.getCgroup().getMemoryUsageInBytes(), notNullValue()); + assertThat(new BigInteger(stats.getCgroup().getMemoryUsageInBytes()), greaterThan(BigInteger.ZERO)); } } else { assertNull(stats.getCgroup()); @@ -159,7 +166,7 @@ List readProcSelfCgroup() { "9:net_cls,net_prio:/", "8:pids:/", "7:blkio:/", - "6:memory:/", + "6:memory:/" + hierarchy, "5:devices:/user.slice", "4:hugetlb:/", "3:perf_event:/", @@ -194,6 +201,19 @@ List readSysFsCgroupCpuAcctCpuStat(String controlGroup) { "throttled_time 139298645489"); } + @Override + String readSysFsCgroupMemoryLimitInBytes(String controlGroup) { + assertThat(controlGroup, equalTo("/" + hierarchy)); + // This is the highest value that can be stored in an unsigned 64 bit number, hence too big for long + return "18446744073709551615"; + } + + @Override + String readSysFsCgroupMemoryUsageInBytes(String controlGroup) { + assertThat(controlGroup, equalTo("/" + hierarchy)); + return "4796416"; + } + @Override boolean areCgroupStatsAvailable() { return areCgroupStatsAvailable; @@ -213,6 +233,8 @@ boolean areCgroupStatsAvailable() { assertThat(cgroup.getCpuStat().getNumberOfElapsedPeriods(), equalTo(17992L)); assertThat(cgroup.getCpuStat().getNumberOfTimesThrottled(), equalTo(1311L)); assertThat(cgroup.getCpuStat().getTimeThrottledNanos(), equalTo(139298645489L)); + assertThat(cgroup.getMemoryLimitInBytes(), equalTo("18446744073709551615")); + assertThat(cgroup.getMemoryUsageInBytes(), equalTo("4796416")); } else { assertNull(cgroup); } diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java index f1e2371db5cb7..0f05e62358976 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java @@ -42,7 +42,10 @@ public void testSerialization() throws IOException { randomAlphaOfLength(8), randomNonNegativeLong(), randomNonNegativeLong(), - new OsStats.Cgroup.CpuStat(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong())); + new OsStats.Cgroup.CpuStat(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()), + randomAlphaOfLength(8), + Long.toString(randomNonNegativeLong()), + Long.toString(randomNonNegativeLong())); OsStats osStats = new OsStats(System.currentTimeMillis(), cpu, mem, swap, cgroup); try (BytesStreamOutput out = new BytesStreamOutput()) { @@ -70,6 +73,8 @@ public void testSerialization() throws IOException { 
assertEquals( osStats.getCgroup().getCpuStat().getTimeThrottledNanos(), deserializedOsStats.getCgroup().getCpuStat().getTimeThrottledNanos()); + assertEquals(osStats.getCgroup().getMemoryLimitInBytes(), deserializedOsStats.getCgroup().getMemoryLimitInBytes()); + assertEquals(osStats.getCgroup().getMemoryUsageInBytes(), deserializedOsStats.getCgroup().getMemoryUsageInBytes()); } } } diff --git a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index 1a357c55eb056..df18b00528c66 100644 --- a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -41,7 +41,6 @@ import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; -import org.elasticsearch.search.MultiValueMode.UnsortedNumericDoubleValues; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -92,7 +91,7 @@ public void testSingleValuedLongs() throws Exception { docsWithValue.set(i); } } - + final Supplier multiValues = () -> DocValues.singleton(new AbstractNumericDocValues() { int docId = -1; @Override @@ -711,126 +710,6 @@ private void verifySortedSet(Supplier supplier, int maxDoc, } } - public void testUnsortedSingleValuedDoubles() throws Exception { - final int numDocs = scaledRandomIntBetween(1, 100); - final double[] array = new double[numDocs]; - final FixedBitSet docsWithValue = randomBoolean() ? null : new FixedBitSet(numDocs); - for (int i = 0; i < array.length; ++i) { - if (randomBoolean()) { - array[i] = randomDouble(); - if (docsWithValue != null) { - docsWithValue.set(i); - } - } else if (docsWithValue != null && randomBoolean()) { - docsWithValue.set(i); - } - } - final NumericDoubleValues singleValues = new NumericDoubleValues() { - private int docID; - @Override - public boolean advanceExact(int doc) throws IOException { - docID = doc; - return docsWithValue == null || docsWithValue.get(docID); - } - @Override - public double doubleValue() { - return array[docID]; - } - }; - final SortedNumericDoubleValues singletonValues = FieldData.singleton(singleValues); - final MultiValueMode.UnsortedNumericDoubleValues multiValues = new MultiValueMode.UnsortedNumericDoubleValues() { - - @Override - public int docValueCount() { - return singletonValues.docValueCount(); - } - - @Override - public boolean advanceExact(int doc) throws IOException { - return singletonValues.advanceExact(doc); - } - - @Override - public double nextValue() throws IOException { - return Math.cos(singletonValues.nextValue()); - } - }; - verifyUnsortedNumeric(() -> multiValues, numDocs); - } - - public void testUnsortedMultiValuedDoubles() throws Exception { - final int numDocs = scaledRandomIntBetween(1, 100); - final double[][] array = new double[numDocs][]; - for (int i = 0; i < numDocs; ++i) { - final double[] values = new double[randomInt(4)]; - for (int j = 0; j < values.length; ++j) { - values[j] = randomDouble(); - } - Arrays.sort(values); - array[i] = values; - } - final MultiValueMode.UnsortedNumericDoubleValues multiValues = new MultiValueMode.UnsortedNumericDoubleValues() { - int doc; - int i; - - @Override - public int docValueCount() { - return array[doc].length; - } - - @Override - public boolean advanceExact(int doc) { - this.doc = doc; - i = 0; - return array[doc].length > 0; - } - - @Override - public double 
nextValue() { - return Math.sin(array[doc][i++]); - } - }; - verifyUnsortedNumeric(() -> multiValues, numDocs); - } - - private void verifyUnsortedNumeric(Supplier supplier, int maxDoc) throws IOException { - for (double missingValue : new double[] { 0, randomDouble() }) { - for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) { - UnsortedNumericDoubleValues values = supplier.get(); - final NumericDoubleValues selected = mode.select(values, missingValue); - for (int i = 0; i < maxDoc; ++i) { - assertTrue(selected.advanceExact(i)); - final double actual = selected.doubleValue(); - double expected = 0.0; - if (values.advanceExact(i) == false) { - expected = missingValue; - } else { - int numValues = values.docValueCount(); - if (mode == MultiValueMode.MAX) { - expected = Long.MIN_VALUE; - } else if (mode == MultiValueMode.MIN) { - expected = Long.MAX_VALUE; - } - for (int j = 0; j < numValues; ++j) { - if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { - expected += values.nextValue(); - } else if (mode == MultiValueMode.MIN) { - expected = Math.min(expected, values.nextValue()); - } else if (mode == MultiValueMode.MAX) { - expected = Math.max(expected, values.nextValue()); - } - } - if (mode == MultiValueMode.AVG) { - expected = expected/numValues; - } - } - - assertEquals(mode.toString() + " docId=" + i, expected, actual, 0.1); - } - } - } - } - public void testValidOrdinals() { assertThat(MultiValueMode.SUM.ordinal(), equalTo(0)); assertThat(MultiValueMode.AVG.ordinal(), equalTo(1)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java index af826a7d7900e..8c383e799fee5 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java @@ -23,12 +23,15 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -63,6 +66,27 @@ protected InternalHistogram createTestInstance(String name, return new InternalHistogram(name, buckets, order, 1, null, format, keyed, pipelineAggregators, metaData); } + // issue 26787 + public void testHandlesNaN() { + InternalHistogram histogram = createTestInstance(); + InternalHistogram histogram2 = createTestInstance(); + List buckets = histogram.getBuckets(); + if (buckets == null || buckets.isEmpty()) { + return; + } + + // Set the key of one bucket to NaN. Must be the last bucket because NaN is greater than everything else. 
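+ // (Double.compare ranks NaN above every other value, including POSITIVE_INFINITY, so replacing
+ // the last bucket keeps the bucket keys in the ascending order the reduce phase relies on.)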
+ List newBuckets = new ArrayList<>(buckets.size()); + if (buckets.size() > 1) { + newBuckets.addAll(buckets.subList(0, buckets.size() - 1)); + } + InternalHistogram.Bucket b = buckets.get(buckets.size() - 1); + newBuckets.add(new InternalHistogram.Bucket(Double.NaN, b.docCount, keyed, b.format, b.aggregations)); + + InternalHistogram newHistogram = histogram.create(newBuckets); + newHistogram.doReduce(Arrays.asList(newHistogram, histogram2), new InternalAggregation.ReduceContext(null, null, false)); + } + @Override protected void assertReduced(InternalHistogram reduced, List inputs) { Map expectedCounts = new TreeMap<>(); diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index 5b68d3cd1f1f6..339240c15b94c 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -107,7 +107,7 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe .put(EXCEPTION_TOP_LEVEL_RATIO_KEY, topLevelRate) .put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate) .put(MockEngineSupport.WRAP_READER_RATIO.getKey(), 1.0d); - logger.info("creating index: [test] using settings: [{}]", settings.build().getAsMap()); + logger.info("creating index: [test] using settings: [{}]", settings.build()); assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type", mapping, XContentType.JSON)); diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java index dbba84f86ff08..835b980d6653e 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -90,7 +90,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc if (createIndexWithoutErrors) { Settings.Builder settings = Settings.builder() .put("index.number_of_replicas", numberOfReplicas()); - logger.info("creating index: [test] using settings: [{}]", settings.build().getAsMap()); + logger.info("creating index: [test] using settings: [{}]", settings.build()); client().admin().indices().prepareCreate("test") .setSettings(settings) .addMapping("type", mapping, XContentType.JSON).execute().actionGet(); @@ -112,7 +112,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING.getKey(), exceptionRate) .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.getKey(), exceptionOnOpenRate); // we cannot expect that the index will be valid - logger.info("creating index: [test] using settings: [{}]", settings.build().getAsMap()); + logger.info("creating index: [test] using settings: [{}]", settings.build()); client().admin().indices().prepareCreate("test") .setSettings(settings) .addMapping("type", mapping, XContentType.JSON).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index 43c6018d8f896..4582567705138 100644 --- 
a/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -28,7 +28,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; -import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -242,7 +241,7 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti return new ScorerSupplier() { @Override - public Scorer get(boolean randomAccess) throws IOException { + public Scorer get(long loadCost) throws IOException { throw new UnsupportedOperationException(); } diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index 2179444aad763..edcfdc2155507 100644 --- a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -19,6 +19,11 @@ package org.elasticsearch.search.searchafter; +import org.apache.lucene.document.LatLonDocValuesField; +import org.apache.lucene.search.FieldComparator; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.SortedNumericSortField; +import org.apache.lucene.search.SortedSetSortField; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.text.Text; @@ -27,13 +32,16 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; import java.io.IOException; import java.util.Collections; +import static org.elasticsearch.search.searchafter.SearchAfterBuilder.extractSortType; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; +import static org.hamcrest.Matchers.equalTo; public class SearchAfterBuilderTests extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; @@ -182,7 +190,7 @@ public void testWithNullArray() throws Exception { builder.setSortValues(null); fail("Should fail on null array."); } catch (NullPointerException e) { - assertThat(e.getMessage(), Matchers.equalTo("Values cannot be null.")); + assertThat(e.getMessage(), equalTo("Values cannot be null.")); } } @@ -192,7 +200,7 @@ public void testWithEmptyArray() throws Exception { builder.setSortValues(new Object[0]); fail("Should fail on empty array."); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), Matchers.equalTo("Values must contains at least one value.")); + assertThat(e.getMessage(), equalTo("Values must contains at least one value.")); } } @@ -215,4 +223,29 @@ private static void randomSearchFromBuilderWithSortValueThrows(Object containing Exception e = expectThrows(IllegalArgumentException.class, () -> builder.setSortValues(values)); assertEquals(e.getMessage(), "Can't handle search_after field value of type [" + containing.getClass() + "]"); } + + public void testExtractSortType() throws Exception { + SortField.Type type = 
extractSortType(LatLonDocValuesField.newDistanceSort("field", 0.0, 180.0)); + assertThat(type, equalTo(SortField.Type.DOUBLE)); + IndexFieldData.XFieldComparatorSource source = new IndexFieldData.XFieldComparatorSource(null, MultiValueMode.MIN, null) { + @Override + public SortField.Type reducedType() { + return SortField.Type.STRING; + } + + @Override + public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { + return null; + } + }; + + type = extractSortType(new SortField("field", source)); + assertThat(type, equalTo(SortField.Type.STRING)); + + type = extractSortType(new SortedNumericSortField("field", SortField.Type.DOUBLE)); + assertThat(type, equalTo(SortField.Type.DOUBLE)); + + type = extractSortType(new SortedSetSortField("field", false)); + assertThat(type, equalTo(SortField.Type.STRING)); + } } diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 5d083d356f2a9..5883d3a5645cd 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1949,7 +1949,7 @@ public void testRecreateBlocksOnRestore() throws Exception { initialSettingsBuilder.put(blockSetting, true); } Settings initialSettings = initialSettingsBuilder.build(); - logger.info("--> using initial block settings {}", initialSettings.getAsMap()); + logger.info("--> using initial block settings {}", initialSettings); if (!initialSettings.isEmpty()) { logger.info("--> apply initial blocks to index"); @@ -1978,7 +1978,7 @@ public void testRecreateBlocksOnRestore() throws Exception { changedSettingsBuilder.put(blockSetting, randomBoolean()); } Settings changedSettings = changedSettingsBuilder.build(); - logger.info("--> applying changed block settings {}", changedSettings.getAsMap()); + logger.info("--> applying changed block settings {}", changedSettings); RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster() .prepareRestoreSnapshot("test-repo", "test-snap") @@ -1992,7 +1992,7 @@ public void testRecreateBlocksOnRestore() throws Exception { .put(initialSettings) .put(changedSettings) .build(); - logger.info("--> merged block settings {}", mergedSettings.getAsMap()); + logger.info("--> merged block settings {}", mergedSettings); logger.info("--> checking consistency between settings and blocks"); assertThat(mergedSettings.getAsBoolean(IndexMetaData.SETTING_BLOCKS_METADATA, false), diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index cb2741a3d0e86..b6a6f91d232c4 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,7 +1,7 @@ :version: 7.0.0-alpha1 :major-version: 7.x -:lucene_version: 7.0.0 -:lucene_version_path: 7_0_0 +:lucene_version: 7.1.0-SNAPSHOT +:lucene_version_path: 7_1_0 :branch: master :jdk: 1.8.0_131 diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index 5f5de5eb1a908..10430adc9a6f3 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -188,51 +188,6 @@ it to the `transformers`: -------------------------------------------------- - -== Deploying in JBoss EAP6 module - -Elasticsearch and Lucene classes need to be in the same JBoss module. 
- -You should define a `module.xml` file like this: - -[source,xml] --------------------------------------------------- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - --------------------------------------------------- - - include::client.asciidoc[] include::docs.asciidoc[] diff --git a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc index 65cc30780b1a7..1ce44b6028db8 100644 --- a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc @@ -6,6 +6,7 @@ following types are supported: <>, <>, <>, +<>, <>, <>, <>, @@ -55,7 +56,7 @@ functionality is implemented by adding the with the `keywords` set to the value of the `stem_exclusion` parameter. The following analyzers support setting custom `stem_exclusion` list: -`arabic`, `armenian`, `basque`, `bulgarian`, `catalan`, `czech`, +`arabic`, `armenian`, `basque`, `bengali`, `bulgarian`, `catalan`, `czech`, `dutch`, `english`, `finnish`, `french`, `galician`, `german`, `hindi`, `hungarian`, `indonesian`, `irish`, `italian`, `latvian`, `lithuanian`, `norwegian`, `portuguese`, `romanian`, `russian`, `sorani`, @@ -209,6 +210,54 @@ PUT /armenian_example <2> This filter should be removed unless there are words which should be excluded from stemming. +[[bengali-analyzer]] +===== `bengali` analyzer + +The `bengali` analyzer could be reimplemented as a `custom` analyzer as follows: + +[source,js] +---------------------------------------------------- +PUT /bengali_example +{ + "settings": { + "analysis": { + "filter": { + "bengali_stop": { + "type": "stop", + "stopwords": "_bengali_" <1> + }, + "bengali_keywords": { + "type": "keyword_marker", + "keywords": ["উদাহরণ"] <2> + }, + "bengali_stemmer": { + "type": "stemmer", + "language": "bengali" + } + }, + "analyzer": { + "bengali": { + "tokenizer": "standard", + "filter": [ + "lowercase", + "indic_normalization", + "bengali_normalization", + "bengali_stop", + "bengali_keywords", + "bengali_stemmer" + ] + } + } + } + } +} +---------------------------------------------------- +// CONSOLE +<1> The default stopwords can be overridden with the `stopwords` + or `stopwords_path` parameters. +<2> This filter should be removed unless there are words which should + be excluded from stemming. + [[brazilian-analyzer]] ===== `brazilian` analyzer diff --git a/docs/reference/analysis/tokenfilters/length-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/length-tokenfilter.asciidoc index 2651980966e95..e53a198df5570 100644 --- a/docs/reference/analysis/tokenfilters/length-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/length-tokenfilter.asciidoc @@ -11,6 +11,6 @@ type: |=========================================================== |Setting |Description |`min` |The minimum number. Defaults to `0`. -|`max` |The maximum number. Defaults to `Integer.MAX_VALUE`. +|`max` |The maximum number. Defaults to `Integer.MAX_VALUE`, which is `2^31-1` or 2147483647. 
|=========================================================== diff --git a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc index a052a4a7a5877..a13c6746d74be 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc @@ -44,6 +44,10 @@ Basque:: http://snowball.tartarus.org/algorithms/basque/stemmer.html[*`basque`*] +Bengali:: +http://www.tandfonline.com/doi/abs/10.1080/02564602.1993.11437284[*`bengali`*] +http://members.unine.ch/jacques.savoy/clef/BengaliStemmerLight.java.txt[*`light_bengali`*] + Brazilian Portuguese:: http://lucene.apache.org/core/4_9_0/analyzers-common/org/apache/lucene/analysis/br/BrazilianStemmer.html[*`brazilian`*] diff --git a/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc index b20f9c9418dc7..3167a4342ac2d 100644 --- a/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc @@ -71,7 +71,7 @@ PUT /my_index Elasticsearch provides the following predefined list of languages: -`_arabic_`, `_armenian_`, `_basque_`, `_brazilian_`, `_bulgarian_`, +`_arabic_`, `_armenian_`, `_basque_`, `_bengali_`, `_brazilian_`, `_bulgarian_`, `_catalan_`, `_czech_`, `_danish_`, `_dutch_`, `_english_`, `_finnish_`, `_french_`, `_galician_`, `_german_`, `_greek_`, `_hindi_`, `_hungarian_`, `_indonesian_`, `_irish_`, `_italian_`, `_latvian_`, `_norwegian_`, `_persian_`, diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index 4d748d31559ba..40c02cf35aa09 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -255,6 +255,25 @@ the operating system: The total amount of time (in nanoseconds) for which all tasks in the same cgroup as the Elasticsearch process have been throttled. +`os.cgroup.memory.control_group` (Linux only):: + The `memory` control group to which the Elasticsearch process + belongs. + +`os.cgroup.memory.limit_in_bytes` (Linux only):: + The maximum amount of user memory (including file cache) allowed + for all tasks in the same cgroup as the Elasticsearch process. + This value can be too big to store in a `long`, so it is returned as + a string so that the value returned can exactly match what the + underlying operating system interface returns. Any value that is + too large to parse into a `long` almost certainly means no limit + has been set for the cgroup. + +`os.cgroup.memory.usage_in_bytes` (Linux only):: + The total current memory usage (in bytes) by all tasks in the same + cgroup as the Elasticsearch process. + This value is stored as a string for consistency with + `os.cgroup.memory.limit_in_bytes`. + NOTE: For the cgroup stats to be visible, cgroups must be compiled into the kernel, the `cpu` and `cpuacct` cgroup subsystems must be configured and stats must be readable from `/sys/fs/cgroup/cpu`
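For illustration only (not part of the change), a minimal self-contained sketch of why these memory values are surfaced as strings: the cgroup v1 "no limit" sentinel is 2^64-1, which does not fit in a signed `long`. The class and variable names here are invented for the demo.

[source,java]
----------------------------------------------------
import java.math.BigInteger;

public class CgroupMemoryLimitDemo {
    public static void main(String[] args) {
        // cgroup v1 reports "no limit" as 2^64-1, which overflows a signed long:
        // Long.parseLong(raw) would throw NumberFormatException here.
        String raw = "18446744073709551615"; // as read from memory.limit_in_bytes
        BigInteger limit = new BigInteger(raw);
        if (limit.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0) {
            System.out.println("effectively unlimited: " + limit);
        } else {
            System.out.println("limit: " + limit.longValueExact() + " bytes");
        }
    }
}
----------------------------------------------------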
diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc index 668857ed41e4d..db7479f9f7d38 100644 --- a/docs/reference/how-to/indexing-speed.asciidoc +++ b/docs/reference/how-to/indexing-speed.asciidoc @@ -114,6 +114,13 @@ The default is `10%` which is often plenty: for example, if you give the JVM 10GB of memory, it will give 1GB to the index buffer, which is enough to host two shards that are heavily indexing. +[float] +=== Disable `_field_names` + +The <> introduces some +index-time overhead, so you might want to disable it if you never need to +run `exists` queries. + [float] === Additional optimizations diff --git a/docs/reference/mapping/fields/field-names-field.asciidoc b/docs/reference/mapping/fields/field-names-field.asciidoc index 45839ac55d950..9dd1f17cbb3a9 100644 --- a/docs/reference/mapping/fields/field-names-field.asciidoc +++ b/docs/reference/mapping/fields/field-names-field.asciidoc @@ -35,3 +35,25 @@ GET my_index/_search // CONSOLE <1> Querying on the `_field_names` field (also see the <> query) + + +==== Disabling `_field_names` + +Because `_field_names` introduces some index-time overhead, you might want to +disable this field if you want to optimize for indexing speed and do not need +`exists` queries. + +[source,js] +-------------------------------------------------- +PUT tweets +{ + "mappings": { + "tweet": { + "_field_names": { + "enabled": false + } + } + } +} +-------------------------------------------------- +// CONSOLE diff --git a/docs/reference/mapping/fields/type-field.asciidoc b/docs/reference/mapping/fields/type-field.asciidoc index f2ff9631634eb..d5b098245999a 100644 --- a/docs/reference/mapping/fields/type-field.asciidoc +++ b/docs/reference/mapping/fields/type-field.asciidoc @@ -1,7 +1,7 @@ [[mapping-type-field]] === `_type` field -deprecated::[6.0.0,See <>] +deprecated[6.0.0,See <>] Each document indexed is associated with a <> (see <>) and an <>. The `_type` field is diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index b9ff8dc407e5b..7b891c579beaa 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -104,10 +104,12 @@ To create a dedicated master-eligible node, set: node.master: true <1> node.data: false <2> node.ingest: false <3> +search.remote.connect: false <4> ------------------- <1> The `node.master` role is enabled by default. <2> Disable the `node.data` role (enabled by default). <3> Disable the `node.ingest` role (enabled by default). +<4> Disable cross-cluster search (enabled by default). ifdef::include-xpack[] NOTE: These settings apply only when {xpack} is not installed. To create a @@ -194,10 +196,12 @@ To create a dedicated data node, set: node.master: false <1> node.data: true <2> node.ingest: false <3> +search.remote.connect: false <4> ------------------- <1> Disable the `node.master` role (enabled by default). <2> The `node.data` role is enabled by default. <3> Disable the `node.ingest` role (enabled by default). +<4> Disable cross-cluster search (enabled by default). ifdef::include-xpack[] NOTE: These settings apply only when {xpack} is not installed.
To create a diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc index 64e9e2e1663aa..d269ce0456763 100644 --- a/docs/reference/modules/snapshots.asciidoc +++ b/docs/reference/modules/snapshots.asciidoc @@ -1,39 +1,55 @@ [[modules-snapshots]] == Snapshot And Restore -The snapshot and restore module allows to create snapshots of individual -indices or an entire cluster into a remote repository like shared file system, -S3, or HDFS. These snapshots are great for backups because they can be restored -relatively quickly but they are not archival because they can only be restored -to versions of Elasticsearch that can read the index. That means that: +You can store snapshots of individual indices or an entire cluster in +a remote repository like a shared file system, S3, or HDFS. These snapshots +are great for backups because they can be restored relatively quickly. However, +snapshots can only be restored to versions of Elasticsearch that can read the +indices: +* A snapshot of an index created in 5.x can be restored to 6.x. * A snapshot of an index created in 2.x can be restored to 5.x. * A snapshot of an index created in 1.x can be restored to 2.x. -* A snapshot of an index created in 1.x can **not** be restored to 5.x. - -To restore a snapshot of an index created in 1.x to 5.x you can restore it to -a 2.x cluster and use <> to rebuild -the index in a 5.x cluster. This is as time consuming as restoring from -archival copies of the original data. - -Note: If a repository is connected to a 2.x cluster, and you want to connect -a 5.x cluster to the same repository, you will have to either first set the 2.x -repository to `readonly` mode (see below for details on `readonly` mode) or create -the 5.x repository in `readonly` mode. A 5.x cluster will update the repository -to conform to 5.x specific formats, which will mean that any new snapshots written -via the 2.x cluster will not be visible to the 5.x cluster, and vice versa. -In fact, as a general rule, only one cluster should connect to the same repository -location with write access; all other clusters connected to the same repository -should be set to `readonly` mode. While setting all but one repositories to -`readonly` should work with multiple clusters differing by one major version, -it is not a supported configuration. +Conversely, snapshots of indices created in 1.x **cannot** be restored to +5.x or 6.x, and snapshots of indices created in 2.x **cannot** be restored +to 6.x. + +Snapshots are incremental and can contain indices created in various +versions of Elasticsearch. If any indices in a snapshot were created in an +incompatible version, you will not be able to restore the snapshot. + +IMPORTANT: When backing up your data prior to an upgrade, keep in mind that you +won't be able to restore snapshots after you upgrade if they contain indices +created in a version that's incompatible with the upgrade version. + +If you end up in a situation where you need to restore a snapshot of an index +that is incompatible with the version of the cluster you are currently running, +you can restore it on the latest compatible version and use +<> to rebuild the index on the current +version. Reindexing from remote is only possible if the original index has +source enabled. Retrieving and reindexing the data can take significantly longer +than simply restoring a snapshot.
If you have a large amount of data, we +recommend testing the reindex from remote process with a subset of your data to +understand the time requirements before proceeding. [float] === Repositories -Before any snapshot or restore operation can be performed, a snapshot repository should be registered in -Elasticsearch. The repository settings are repository-type specific. See below for details. +You must register a snapshot repository before you can perform snapshot and +restore operations. We recommend creating a new snapshot repository for each +major version. The valid repository settings depend on the repository type. + +If you register the same snapshot repository with multiple clusters, only +one cluster should have write access to the repository. All other clusters +connected to that repository should set the repository to `readonly` mode. + +NOTE: The snapshot format can change across major versions, so if you have +clusters on different major versions trying to write the same repository, +new snapshots written by one version will not be visible to the other. While +setting the repository to `readonly` on all but one of the clusters should work +with multiple clusters differing by one major version, it is not a supported +configuration. [source,js] ----------------------------------- @@ -48,7 +64,7 @@ PUT /_snapshot/my_backup // CONSOLE // TESTSETUP -Once a repository is registered, its information can be obtained using the following command: +To retrieve information about a registered repository, use a GET request: [source,js] ----------------------------------- @@ -71,9 +87,11 @@ which returns: ----------------------------------- // TESTRESPONSE -Information about multiple repositories can be fetched in one go by using a comma-delimited list of repository names. -Star wildcards are supported as well. For example, information about repositories that start with `repo` or that contain `backup` -can be obtained using the following command: +To retrieve information about multiple repositories, specify a +comma-delimited list of repositories. You can also use the * wildcard when +specifying repository names. For example, the following request retrieves +information about all of the snapshot repositories that start with `repo` or +contain `backup`: [source,js] ----------------------------------- @@ -81,8 +99,8 @@ GET /_snapshot/repo*,*backup* ----------------------------------- // CONSOLE -If a repository name is not specified, or `_all` is used as repository name Elasticsearch will return information about -all repositories currently registered in the cluster: +To retrieve information about all registered snapshot repositories, omit the +repository name or specify `_all`: [source,js] ----------------------------------- diff --git a/docs/reference/query-dsl/fuzzy-query.asciidoc b/docs/reference/query-dsl/fuzzy-query.asciidoc index 70f7eb48ada7f..eb3fc9178506c 100644 --- a/docs/reference/query-dsl/fuzzy-query.asciidoc +++ b/docs/reference/query-dsl/fuzzy-query.asciidoc @@ -63,6 +63,10 @@ GET /_search The maximum number of terms that the `fuzzy` query will expand to. Defaults to `50`. +`transpositions`:: + + Whether fuzzy transpositions (`ab` -> `ba`) are supported. + Default is `false`. WARNING: This query can be very heavy if `prefix_length` is set to `0` and if `max_expansions` is set to a high number. It could result in every term in the
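To make the edit-distance semantics concrete, here is a small illustrative sketch (not part of the change) using the same five-argument `FuzzyQuery` constructor exercised by the tests above; the field name and terms are invented for the demo.

[source,java]
----------------------------------------------------
import org.apache.lucene.index.Term;
import org.apache.lucene.search.FuzzyQuery;

public class TranspositionDemo {
    public static void main(String[] args) {
        Term term = new Term("field", "ab");
        // FuzzyQuery(term, maxEdits, prefixLength, maxExpansions, transpositions)
        // With transpositions (Damerau-Levenshtein), "ba" is a single edit away
        // from "ab", so it matches at maxEdits=1.
        FuzzyQuery withTranspositions = new FuzzyQuery(term, 1, 0, 50, true);
        // Without transpositions (classic Levenshtein), "ba" costs two
        // substitutions (ab -> bb -> ba), so it only matches once maxEdits=2.
        FuzzyQuery withoutTranspositions = new FuzzyQuery(term, 1, 0, 50, false);
        System.out.println(withTranspositions + " vs " + withoutTranspositions);
    }
}
----------------------------------------------------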
diff --git a/docs/reference/query-dsl/multi-match-query.asciidoc b/docs/reference/query-dsl/multi-match-query.asciidoc index 48c3f77d3cb9f..6f6783422486b 100644 --- a/docs/reference/query-dsl/multi-match-query.asciidoc +++ b/docs/reference/query-dsl/multi-match-query.asciidoc @@ -137,7 +137,8 @@ follows: Also, accepts `analyzer`, `boost`, `operator`, `minimum_should_match`, `fuzziness`, `lenient`, `prefix_length`, `max_expansions`, `rewrite`, `zero_terms_query`, - `cutoff_frequency` and `auto_generate_synonyms_phrase_query`, as explained in <>. + `cutoff_frequency`, `auto_generate_synonyms_phrase_query` and `fuzzy_transpositions`, + as explained in <>. [IMPORTANT] [[operator-min]] diff --git a/docs/reference/query-dsl/query-string-query.asciidoc b/docs/reference/query-dsl/query-string-query.asciidoc index f42a3b09f9b6e..29fe70adb2e10 100644 --- a/docs/reference/query-dsl/query-string-query.asciidoc +++ b/docs/reference/query-dsl/query-string-query.asciidoc @@ -83,6 +83,9 @@ to `AUTO`. See <> for allowed settings. |`fuzzy_prefix_length` |Set the prefix length for fuzzy queries. Default is `0`. +|`fuzzy_transpositions` |Set to `false` to disable fuzzy transpositions (`ab` -> `ba`). +Default is `true`. + |`phrase_slop` |Sets the default slop for phrases. If zero, then exact phrase matches are required. Default value is `0`. diff --git a/docs/reference/query-dsl/query-string-syntax.asciidoc b/docs/reference/query-dsl/query-string-syntax.asciidoc index 8a76d686b623a..8a7b394b2e870 100644 --- a/docs/reference/query-dsl/query-string-syntax.asciidoc +++ b/docs/reference/query-dsl/query-string-syntax.asciidoc @@ -53,6 +53,25 @@ Be aware that wildcard queries can use an enormous amount of memory and perform very badly -- just think how many terms need to be queried to match the query string `"a* b* c*"`. +[WARNING] +======= +Pure wildcards `\*` are rewritten to <> queries for efficiency. +As a consequence, the wildcard `"field:*"` would match documents with an empty value +like the following: +``` +{ + "field": "" +} +``` +\... and would **not** match if the field is missing or set with an explicit null +value like the following: +``` +{ + "field": null +} +``` +======= + [WARNING] ======= Allowing a wildcard at the beginning of a word (eg `"*ing"`) is particularly diff --git a/docs/reference/query-dsl/simple-query-string-query.asciidoc b/docs/reference/query-dsl/simple-query-string-query.asciidoc index 1f887cf631155..99fbc131c1be3 100644 --- a/docs/reference/query-dsl/simple-query-string-query.asciidoc +++ b/docs/reference/query-dsl/simple-query-string-query.asciidoc @@ -70,6 +70,15 @@ Defaults to `true`. |`all_fields` | deprecated[6.0.0, set `fields` to `*` instead] Perform the query on all fields detected in the mapping that can be queried. + +|`fuzzy_prefix_length` |Set the prefix length for fuzzy queries. Default +is `0`. + +|`fuzzy_max_expansions` |Controls the number of terms fuzzy queries will +expand to. Defaults to `50`. + +|`fuzzy_transpositions` |Set to `false` to disable fuzzy transpositions (`ab` -> `ba`). +Default is `true`.
|======================================================================= [float] diff --git a/docs/reference/setup/sysconfig/file-descriptors.asciidoc b/docs/reference/setup/sysconfig/file-descriptors.asciidoc index f4bc95749ae23..17e7884be0d33 100644 --- a/docs/reference/setup/sysconfig/file-descriptors.asciidoc +++ b/docs/reference/setup/sysconfig/file-descriptors.asciidoc @@ -16,6 +16,9 @@ For the `.zip` and `.tar.gz` packages, set <> as root before starting Elasticsearch, or set `nofile` to `65536` in <>. +On macOS, you must also pass the JVM option `-XX:-MaxFDLimit` +to Elasticsearch in order for it to make use of the higher file descriptor limit. + RPM and Debian packages already default the maximum number of file descriptors to 65536 and do not require further configuration. diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BengaliNormalizationFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BengaliNormalizationFilterFactory.java new file mode 100644 index 0000000000000..fbec142bf3c1b --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BengaliNormalizationFilterFactory.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.bn.BengaliNormalizationFilter; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; +import org.elasticsearch.index.analysis.MultiTermAwareComponent; + +/** + * Factory for {@link BengaliNormalizationFilter} + */ +public class BengaliNormalizationFilterFactory extends AbstractTokenFilterFactory implements MultiTermAwareComponent { + + BengaliNormalizationFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + super(indexSettings, name, settings); + } + + @Override + public TokenStream create(TokenStream tokenStream) { + return new BengaliNormalizationFilter(tokenStream); + } + + @Override + public Object getMultiTermComponent() { + return this; + } +} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index b94c4f1c79095..813075fa73f06 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -25,6 +25,7 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; import org.apache.lucene.analysis.ar.ArabicStemFilter; +import org.apache.lucene.analysis.bn.BengaliNormalizationFilter; import org.apache.lucene.analysis.br.BrazilianStemFilter; import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter; import org.apache.lucene.analysis.cjk.CJKBigramFilter; @@ -94,6 +95,7 @@ public Map> getTokenFilters() { filters.put("arabic_normalization", ArabicNormalizationFilterFactory::new); filters.put("arabic_stem", ArabicStemTokenFilterFactory::new); filters.put("asciifolding", ASCIIFoldingTokenFilterFactory::new); + filters.put("bengali_normalization", BengaliNormalizationFilterFactory::new); filters.put("brazilian_stem", BrazilianStemTokenFilterFactory::new); filters.put("cjk_bigram", CJKBigramFilterFactory::new); filters.put("cjk_width", CJKWidthFilterFactory::new); @@ -180,6 +182,7 @@ public List getPreConfiguredTokenFilters() { filters.add(PreConfiguredTokenFilter.singleton("arabic_normalization", true, ArabicNormalizationFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("arabic_stem", false, ArabicStemFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("asciifolding", true, ASCIIFoldingFilter::new)); + filters.add(PreConfiguredTokenFilter.singleton("bengali_normalization", true, BengaliNormalizationFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("brazilian_stem", false, BrazilianStemFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("cjk_bigram", false, CJKBigramFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("cjk_width", true, CJKWidthFilter::new)); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index c94a449afd2c1..630a6a6ebeca4 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -22,6 +22,7 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ar.ArabicStemFilter; import org.apache.lucene.analysis.bg.BulgarianStemFilter; +import org.apache.lucene.analysis.bn.BengaliStemFilter; import org.apache.lucene.analysis.br.BrazilianStemFilter; import org.apache.lucene.analysis.ckb.SoraniStemFilter; import org.apache.lucene.analysis.cz.CzechStemFilter; @@ -102,6 +103,8 @@ public TokenStream create(TokenStream tokenStream) { return new SnowballFilter(tokenStream, new ArmenianStemmer()); } else if ("basque".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new BasqueStemmer()); + } else if ("bengali".equalsIgnoreCase(language)) { + return new BengaliStemFilter(tokenStream); } else if ("brazilian".equalsIgnoreCase(language)) { return new BrazilianStemFilter(tokenStream); } else if ("bulgarian".equalsIgnoreCase(language)) { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java index 22d34f218e0f2..707930277e7a2 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java @@ -67,6 +67,7 @@ protected Map> getTokenFilters() { filters.put("uppercase", UpperCaseTokenFilterFactory.class); filters.put("ngram", NGramTokenFilterFactory.class); filters.put("edgengram", EdgeNGramTokenFilterFactory.class); + filters.put("bengalistem", StemmerTokenFilterFactory.class); filters.put("bulgarianstem", StemmerTokenFilterFactory.class); filters.put("englishminimalstem", StemmerTokenFilterFactory.class); filters.put("englishpossessive", StemmerTokenFilterFactory.class); @@ -106,6 +107,7 @@ protected Map> getTokenFilters() { filters.put("patternreplace", PatternReplaceTokenFilterFactory.class); filters.put("patterncapturegroup", PatternCaptureGroupTokenFilterFactory.class); filters.put("arabicnormalization", ArabicNormalizationFilterFactory.class); + filters.put("bengalinormalization", BengaliNormalizationFilterFactory.class); filters.put("germannormalization", GermanNormalizationFilterFactory.class); filters.put("hindinormalization", HindiNormalizationFilterFactory.class); filters.put("indicnormalization", IndicNormalizationFilterFactory.class); @@ -159,6 +161,7 @@ protected Map> getPreConfiguredTokenFilters() { filters.put("arabic_normalization", null); filters.put("arabic_stem", null); filters.put("asciifolding", null); + filters.put("bengali_normalization", null); filters.put("brazilian_stem", null); filters.put("cjk_bigram", null); filters.put("cjk_width", null); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java new file mode 100644 index 0000000000000..081580a6ae93a --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.analysis.common; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import java.util.Collection; +import java.util.Collections; + +public class MassiveWordListTests extends ESSingleNodeTestCase { + + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return Collections.singleton(CommonAnalysisPlugin.class); + } + + public void testCreateIndexWithMassiveWordList() { + String[] wordList = new String[100000]; + for (int i = 0; i < wordList.length; i++) { + wordList[i] = "hello world"; + } + client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", 1) + .put("analysis.analyzer.test_analyzer.type", "custom") + .put("analysis.analyzer.test_analyzer.tokenizer", "standard") + .putArray("analysis.analyzer.test_analyzer.filter", "dictionary_decompounder", "lowercase") + .put("analysis.filter.dictionary_decompounder.type", "dictionary_decompounder") + .putArray("analysis.filter.dictionary_decompounder.word_list", wordList) + ).get(); + } +} diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml index abd9a3f5ae0d9..6ff3b8c802735 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml @@ -27,3 +27,13 @@ - match: { detail.analyzer.tokens.1.start_offset: 4 } - match: { detail.analyzer.tokens.1.end_offset: 8 } - match: { detail.analyzer.tokens.1.position: 1 } + +--- +"bengali": + - do: + indices.analyze: + body: + text: বাড়ী + analyzer: bengali + - length: { tokens: 1 } + - match: { tokens.0.token: বার } diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml index 95638025f0bca..47eb436788abf 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml @@ -695,6 +695,37 @@ - length: { tokens: 1 } - match: { tokens.0.token: اجن } +--- +"bengali_normalization": + - do: + indices.create: + index: test + body: + settings: + analysis: + filter: + my_bengali_normalization: + type: bengali_normalization + - do: + indices.analyze: + index: test + body: + text: চাঁদ + tokenizer: keyword + filter: [my_bengali_normalization] + - length: { tokens: 1 } + - match: { tokens.0.token: চাদ } + + # Test pre-configured token filter too: + - do: + indices.analyze: + body: + text: চাঁদ + tokenizer: keyword + filter: [bengali_normalization] + - 
length: { tokens: 1 } + - match: { tokens.0.token: চাদ } + --- "german_normalization": - do: @@ -1475,3 +1506,26 @@ filter: [russian_stem] - length: { tokens: 1 } - match: { tokens.0.token: журнал } + +--- +"bengali_stem": + - do: + indices.create: + index: test + body: + settings: + analysis: + filter: + my_bengali_stem: + type: stemmer + language: bengali + + - do: + indices.analyze: + index: test + body: + text: করেছিলাম + tokenizer: keyword + filter: [my_bengali_stem] + - length: { tokens: 1 } + - match: { tokens.0.token: কর } diff --git a/modules/lang-expression/licenses/lucene-NOTICE.txt b/modules/lang-expression/licenses/lucene-NOTICE.txt index ecf08201a5ee6..1a1d51572432a 100644 --- a/modules/lang-expression/licenses/lucene-NOTICE.txt +++ b/modules/lang-expression/licenses/lucene-NOTICE.txt @@ -54,13 +54,14 @@ The KStem stemmer in was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst) under the BSD-license. -The Arabic,Persian,Romanian,Bulgarian, and Hindi analyzers (common) come with a default +The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default stopword list that is BSD-licensed created by Jacques Savoy. These files reside in: analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt, -analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt +analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt See http://members.unine.ch/jacques.savoy/clef/index.html. 
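The REST tests above pin down the expected tokens for the new Bengali analyzer and filters. The same Lucene filter that `BengaliNormalizationFilterFactory` wraps can also be exercised directly against plain Lucene 7.x; a minimal sketch mirroring the `bengali_normalization` keyword-tokenizer test (the demo class name is illustrative):

    import java.io.StringReader;

    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.Tokenizer;
    import org.apache.lucene.analysis.bn.BengaliNormalizationFilter;
    import org.apache.lucene.analysis.core.KeywordTokenizer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    public class BengaliNormalizationDemo {
        public static void main(String[] args) throws Exception {
            // keyword tokenizer emits the whole input as a single token, as in the YAML test
            Tokenizer tokenizer = new KeywordTokenizer();
            tokenizer.setReader(new StringReader("চাঁদ"));
            try (TokenStream stream = new BengaliNormalizationFilter(tokenizer)) {
                CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
                stream.reset();
                while (stream.incrementToken()) {
                    System.out.println(term); // expected: চাদ, per 40_token_filters.yml above
                }
                stream.end();
            }
        }
    }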
The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers diff --git a/modules/lang-expression/licenses/lucene-expressions-7.0.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.0.0.jar.sha1 deleted file mode 100644 index a13f66c451578..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cba375c000fe184e45d439e61162c1111b77c907 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..3c02bb4144b29 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +3d20e7c480ff31a6e0a74c57770a1cfdfd56cf0c \ No newline at end of file diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java index bece1751a46c3..dbbb98af65af8 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java @@ -20,7 +20,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.MultiDocValues; +import org.apache.lucene.index.OrdinalMap; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -379,12 +379,12 @@ private Query parentFieldDoToQuery(QueryShardContext context) throws IOException /** * A query that rewrites into another query using - * {@link JoinUtil#createJoinQuery(String, Query, Query, IndexSearcher, ScoreMode, MultiDocValues.OrdinalMap, int, int)} + * {@link JoinUtil#createJoinQuery(String, Query, Query, IndexSearcher, ScoreMode, OrdinalMap, int, int)} * that executes the actual join. * * This query is exclusively used by the {@link HasChildQueryBuilder} and {@link HasParentQueryBuilder} to get access - * to the {@link DirectoryReader} used by the current search in order to retrieve the {@link MultiDocValues.OrdinalMap}. - * The {@link MultiDocValues.OrdinalMap} is required by {@link JoinUtil} to execute the join. + * to the {@link DirectoryReader} used by the current search in order to retrieve the {@link OrdinalMap}. + * The {@link OrdinalMap} is required by {@link JoinUtil} to execute the join. */ // TODO: Find a way to remove this query and let doToQuery(...) just return the query from JoinUtil.createJoinQuery(...) 
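Since the javadoc above now points at the relocated `OrdinalMap`, here is the shape of the `JoinUtil` call that `LateParsingQuery#rewrite` (below) ends up building once it has hold of a `DirectoryReader`; a sketch only, with placeholder parameter names:

    import java.io.IOException;

    import org.apache.lucene.index.OrdinalMap;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.join.JoinUtil;
    import org.apache.lucene.search.join.ScoreMode;

    final class JoinQuerySketch {
        // mirrors LateParsingQuery#rewrite: the ordinal map carries the global ordinals
        // of the join field and is what lets JoinUtil tie child docs back to parents
        static Query createJoin(String joinField, Query childQuery, Query parentQuery,
                                IndexSearcher searcher, ScoreMode scoreMode, OrdinalMap ordinalMap,
                                int minChildren, int maxChildren) throws IOException {
            return JoinUtil.createJoinQuery(joinField, childQuery, parentQuery, searcher,
                    scoreMode, ordinalMap, minChildren, maxChildren);
        }
    }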
public static final class LateParsingQuery extends Query { @@ -422,7 +422,7 @@ public Query rewrite(IndexReader reader) throws IOException { indexSearcher.setQueryCache(null); indexSearcher.setSimilarity(similarity); IndexOrdinalsFieldData indexParentChildFieldData = fieldDataJoin.loadGlobal((DirectoryReader) reader); - MultiDocValues.OrdinalMap ordinalMap = indexParentChildFieldData.getOrdinalMap(); + OrdinalMap ordinalMap = indexParentChildFieldData.getOrdinalMap(); return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren); } else { diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ESLoggingHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ESLoggingHandler.java new file mode 100644 index 0000000000000..47a31f268a6a8 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ESLoggingHandler.java @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.logging.LogLevel; +import io.netty.handler.logging.LoggingHandler; +import io.netty.util.internal.StringUtil; +import org.elasticsearch.Version; +import org.elasticsearch.common.compress.Compressor; +import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.transport.TcpHeader; +import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportStatus; + +import java.io.IOException; +import java.io.UncheckedIOException; + +final class ESLoggingHandler extends LoggingHandler { + + ESLoggingHandler() { + super(LogLevel.TRACE); + } + + @Override + protected String format(final ChannelHandlerContext ctx, final String eventName, final Object arg) { + if (arg instanceof ByteBuf) { + try { + return format(ctx, eventName, (ByteBuf) arg); + } catch (final Exception e) { + // we really do not want to allow a bug in the formatting handling to escape + logger.trace("an exception occurred formatting a trace message", e); + // we are going to let this be formatted via the default formatting + return super.format(ctx, eventName, arg); + } + } else { + return super.format(ctx, eventName, arg); + } + } + + private static final int MESSAGE_LENGTH_OFFSET = TcpHeader.MARKER_BYTES_SIZE; + private static final int REQUEST_ID_OFFSET = MESSAGE_LENGTH_OFFSET + TcpHeader.MESSAGE_LENGTH_SIZE; + private static final int STATUS_OFFSET = REQUEST_ID_OFFSET + TcpHeader.REQUEST_ID_SIZE; + private static final int VERSION_ID_OFFSET = STATUS_OFFSET + TcpHeader.STATUS_SIZE; + private static final int ACTION_OFFSET = VERSION_ID_OFFSET + TcpHeader.VERSION_ID_SIZE; + + private String format(final ChannelHandlerContext ctx, final String eventName, final ByteBuf arg) throws IOException { + final int readableBytes = arg.readableBytes(); + if (readableBytes == 0) { + return super.format(ctx, eventName, arg); + } else if (readableBytes >= 2) { + final StringBuilder sb = new StringBuilder(); + sb.append(ctx.channel().toString()); + final int offset = arg.readerIndex(); + // this might be an ES message, check the header + if (arg.getByte(offset) == (byte) 'E' && arg.getByte(offset + 1) == (byte) 'S') { + if (readableBytes == TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE) { + final int length = arg.getInt(offset + MESSAGE_LENGTH_OFFSET); + if (length == TcpTransport.PING_DATA_SIZE) { + sb.append(" [ping]").append(' ').append(eventName).append(": ").append(readableBytes).append('B'); + return sb.toString(); + } + } + else if (readableBytes >= TcpHeader.HEADER_SIZE) { + // we are going to try to decode this as an ES message + final int length = arg.getInt(offset + MESSAGE_LENGTH_OFFSET); + final long requestId = arg.getLong(offset + REQUEST_ID_OFFSET); + final byte status = arg.getByte(offset + STATUS_OFFSET); + final boolean isRequest = TransportStatus.isRequest(status); + final String type = isRequest ? 
"request" : "response"; + final String version = Version.fromId(arg.getInt(offset + VERSION_ID_OFFSET)).toString(); + sb.append(" [length: ").append(length); + sb.append(", request id: ").append(requestId); + sb.append(", type: ").append(type); + sb.append(", version: ").append(version); + if (isRequest) { + // it looks like an ES request, try to decode the action + final int remaining = readableBytes - ACTION_OFFSET; + final ByteBuf slice = arg.slice(offset + ACTION_OFFSET, remaining); + // the stream might be compressed + try (StreamInput in = in(status, slice, remaining)) { + // the first bytes in the message is the context headers + try (ThreadContext context = new ThreadContext(Settings.EMPTY)) { + context.readHeaders(in); + } + // now we can decode the action name + sb.append(", action: ").append(in.readString()); + } + } + sb.append(']'); + sb.append(' ').append(eventName).append(": ").append(readableBytes).append('B'); + return sb.toString(); + } + } + } + // we could not decode this as an ES message, use the default formatting + return super.format(ctx, eventName, arg); + } + + private StreamInput in(final Byte status, final ByteBuf slice, final int remaining) throws IOException { + final ByteBufStreamInput in = new ByteBufStreamInput(slice, remaining); + if (TransportStatus.isCompress(status)) { + final Compressor compressor = CompressorFactory.compressor(Netty4Utils.toBytesReference(slice)); + return compressor.streamInput(in); + } else { + return in; + } + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index 3b011ff4e4036..84c86bd2d770a 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -322,7 +322,8 @@ protected void sendMessage(Channel channel, BytesReference reference, ActionList } else { final Throwable cause = f.cause(); Netty4Utils.maybeDie(cause); - logger.error("write and flush on the network layer failed", cause); + logger.warn((Supplier) () -> + new ParameterizedMessage("write and flush on the network layer failed (channel: {})", channel), cause); assert cause instanceof Exception; listener.onFailure((Exception) cause); } @@ -338,7 +339,9 @@ protected void closeChannels(final List channels, boolean blocking, boo * side otherwise the client (node) initiates the TCP closing sequence which doesn't cause these issues. 
Setting this * by default from the beginning can have unexpected side-effects and should be avoided, our protocol is designed * in a way that clients close the connection which is how it should be*/ - channel.config().setOption(ChannelOption.SO_LINGER, 0); + if (channel.isOpen()) { + channel.config().setOption(ChannelOption.SO_LINGER, 0); + } } } if (blocking) { @@ -406,6 +409,7 @@ protected class ClientChannelInitializer extends ChannelInitializer<Channel> { @Override protected void initChannel(Channel ch) throws Exception { + ch.pipeline().addLast("logging", new ESLoggingHandler()); ch.pipeline().addLast("size", new Netty4SizeHeaderFrameDecoder()); // using a dot as a prefix means this cannot come from any settings parsed ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this, ".client")); @@ -429,6 +433,7 @@ protected ServerChannelInitializer(String name) { @Override protected void initChannel(Channel ch) throws Exception { + ch.pipeline().addLast("logging", new ESLoggingHandler()); ch.pipeline().addLast("open_channels", Netty4Transport.this.serverOpenChannels); ch.pipeline().addLast("size", new Netty4SizeHeaderFrameDecoder()); ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this, name)); diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index d71e1ee937690..05295c1d4da4e 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -27,6 +27,7 @@ import io.netty.util.NettyRuntime; import io.netty.util.internal.logging.InternalLogger; import io.netty.util.internal.logging.InternalLoggerFactory; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; import org.elasticsearch.common.Booleans; @@ -37,8 +38,12 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; +import java.util.LinkedList; import java.util.List; import java.util.Locale; +import java.util.Optional; +import java.util.Queue; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; @@ -167,7 +172,8 @@ public static void closeChannels(final Collection<Channel> channels) throws IOException * @param cause the throwable to test */ public static void maybeDie(final Throwable cause) { - if (cause instanceof Error) { + final Optional<Error> maybeError = maybeError(cause); + if (maybeError.isPresent()) { /* * Here be dragons. We want to rethrow this so that it bubbles up to the uncaught exception handler. Yet, Netty wraps too many * invocations of user-code in try/catch blocks that swallow all throwables. 
This means that a rethrow here will not bubble up @@ -178,15 +184,52 @@ public static void maybeDie(final Throwable cause) { // try to log the current stack trace final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace(); final String formatted = Arrays.stream(stackTrace).skip(1).map(e -> "\tat " + e).collect(Collectors.joining("\n")); - ESLoggerFactory.getLogger(Netty4Utils.class).error("fatal error on the network layer\n{}", formatted); + final Logger logger = ESLoggerFactory.getLogger(Netty4Utils.class); + logger.error("fatal error on the network layer\n{}", formatted); } finally { new Thread( () -> { - throw (Error) cause; + throw maybeError.get(); }) .start(); } } } + static final int MAX_ITERATIONS = 1024; + + /** + * Unwrap the specified throwable looking for any suppressed errors or errors as a root cause of the specified throwable. + * + * @param cause the root throwable + * + * @return an optional error if one is found suppressed or a root cause in the tree rooted at the specified throwable + */ + static Optional<Error> maybeError(final Throwable cause) { + // early terminate if the cause is already an error + if (cause instanceof Error) { + return Optional.of((Error) cause); + } + + final Queue<Throwable> queue = new LinkedList<>(); + queue.add(cause); + int iterations = 0; + while (!queue.isEmpty()) { + iterations++; + if (iterations > MAX_ITERATIONS) { + ESLoggerFactory.getLogger(Netty4Utils.class).warn("giving up looking for fatal errors on the network layer", cause); + break; + } + final Throwable current = queue.remove(); + if (current instanceof Error) { + return Optional.of((Error) current); + } + Collections.addAll(queue, current.getSuppressed()); + if (current.getCause() != null) { + queue.add(current.getCause()); + } + } + return Optional.empty(); + } + } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java new file mode 100644 index 0000000000000..acd71749e2333 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
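The `finally` block of `maybeDie` above relies on a small JVM trick: rethrowing the `Error` on a brand-new thread escapes all of Netty's catch-everything frames, so it reaches the default uncaught-exception handler. A standalone sketch of the pattern (pure JDK, names illustrative):

    final class RethrowOnFreshThread {
        public static void main(String[] args) {
            // whatever is thrown on the fresh thread lands here, not in a caller's try/catch
            Thread.setDefaultUncaughtExceptionHandler(
                    (thread, error) -> System.err.println("uncaught on " + thread.getName() + ": " + error));
            new Thread(() -> { throw new Error("simulated fatal error"); }).start();
        }
    }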
+ */ + +package org.elasticsearch.transport.netty4; + +import org.apache.logging.log4j.Level; +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; + +@ESIntegTestCase.ClusterScope(numDataNodes = 2) +@TestLogging(value = "org.elasticsearch.transport.netty4.ESLoggingHandler:trace") +public class ESLoggingHandlerIT extends ESNetty4IntegTestCase { + + private MockLogAppender appender; + + public void setUp() throws Exception { + super.setUp(); + appender = new MockLogAppender(); + Loggers.addAppender(Loggers.getLogger(ESLoggingHandler.class), appender); + appender.start(); + } + + public void tearDown() throws Exception { + Loggers.removeAppender(Loggers.getLogger(ESLoggingHandler.class), appender); + appender.stop(); + super.tearDown(); + } + + public void testLoggingHandler() throws IllegalAccessException { + final String writePattern = + ".*\\[length: \\d+" + + ", request id: \\d+" + + ", type: request" + + ", version: .*" + + ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]" + + " WRITE: \\d+B"; + final MockLogAppender.LoggingExpectation writeExpectation = + new MockLogAppender.PatternSeenEventExcpectation( + "hot threads request", ESLoggingHandler.class.getCanonicalName(), Level.TRACE, writePattern); + + final MockLogAppender.LoggingExpectation flushExpectation = + new MockLogAppender.SeenEventExpectation("flush", ESLoggingHandler.class.getCanonicalName(), Level.TRACE, "*FLUSH*"); + + final String readPattern = + ".*\\[length: \\d+" + + ", request id: \\d+" + + ", type: request" + + ", version: .*" + + ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]" + + " READ: \\d+B"; + + final MockLogAppender.LoggingExpectation readExpectation = + new MockLogAppender.PatternSeenEventExcpectation( + "hot threads request", ESLoggingHandler.class.getCanonicalName(), Level.TRACE, readPattern); + + appender.addExpectation(writeExpectation); + appender.addExpectation(flushExpectation); + appender.addExpectation(readExpectation); + client().admin().cluster().nodesHotThreads(new NodesHotThreadsRequest()).actionGet(); + appender.assertAllExpectationsMatched(); + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java index 8372a8540b8be..43be6f0efdda0 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java @@ -22,6 +22,7 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.CompositeByteBuf; import io.netty.buffer.Unpooled; +import io.netty.handler.codec.DecoderException; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.AbstractBytesReferenceTestCase; import org.elasticsearch.common.bytes.BytesArray; @@ -32,6 +33,9 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.Optional; + +import static org.hamcrest.CoreMatchers.equalTo; public class Netty4UtilsTests extends ESTestCase { @@ -75,6 +79,60 @@ public void testToChannelBuffer() throws IOException { assertArrayEquals(BytesReference.toBytes(ref), BytesReference.toBytes(bytesReference)); 
} + public void testMaybeError() { + final Error outOfMemoryError = new OutOfMemoryError(); + assertError(outOfMemoryError, outOfMemoryError); + + final DecoderException decoderException = new DecoderException(outOfMemoryError); + assertError(decoderException, outOfMemoryError); + + final Exception e = new Exception(); + e.addSuppressed(decoderException); + assertError(e, outOfMemoryError); + + final int depth = randomIntBetween(1, 16); + Throwable cause = new Exception(); + boolean fatal = false; + Error error = null; + for (int i = 0; i < depth; i++) { + final int length = randomIntBetween(1, 4); + for (int j = 0; j < length; j++) { + if (!fatal && rarely()) { + error = new Error(); + cause.addSuppressed(error); + fatal = true; + } else { + cause.addSuppressed(new Exception()); + } + } + if (!fatal && rarely()) { + cause = error = new Error(cause); + fatal = true; + } else { + cause = new Exception(cause); + } + } + if (fatal) { + assertError(cause, error); + } else { + assertFalse(Netty4Utils.maybeError(cause).isPresent()); + } + + assertFalse(Netty4Utils.maybeError(new Exception(new DecoderException())).isPresent()); + + Throwable chain = outOfMemoryError; + for (int i = 0; i < Netty4Utils.MAX_ITERATIONS; i++) { + chain = new Exception(chain); + } + assertFalse(Netty4Utils.maybeError(chain).isPresent()); + } + + private void assertError(final Throwable cause, final Error error) { + final Optional<Error> maybeError = Netty4Utils.maybeError(cause); + assertTrue(maybeError.isPresent()); + assertThat(maybeError.get(), equalTo(error)); + } + private BytesReference getRandomizedBytesReference(int length) throws IOException { // we know bytes stream output always creates a paged bytes reference, we use it to create randomized content ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(length, bigarrays); diff --git a/modules/tribe/src/main/java/org/elasticsearch/tribe/TribePlugin.java b/modules/tribe/src/main/java/org/elasticsearch/tribe/TribePlugin.java index f77dea1dd306d..7479e60a95cba 100644 --- a/modules/tribe/src/main/java/org/elasticsearch/tribe/TribePlugin.java +++ b/modules/tribe/src/main/java/org/elasticsearch/tribe/TribePlugin.java @@ -137,7 +137,7 @@ public Settings additionalSettings() { return sb.build(); } else { - for (String s : settings.getAsMap().keySet()) { + for (String s : settings.keySet()) { if (s.startsWith("tribe.") && !s.equals(TribeService.TRIBE_NAME_SETTING.getKey())) { throw new IllegalArgumentException("tribe cannot contain inner tribes: " + s); } diff --git a/modules/tribe/src/main/java/org/elasticsearch/tribe/TribeService.java b/modules/tribe/src/main/java/org/elasticsearch/tribe/TribeService.java index 714749b94782c..d32df242e487c 100644 --- a/modules/tribe/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/modules/tribe/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -195,7 +195,7 @@ public TribeService(Settings settings, NodeEnvironment nodeEnvironment, ClusterS * combined with tribe specific settings. 
*/ static Settings buildClientSettings(String tribeName, String parentNodeId, Settings globalSettings, Settings tribeSettings) { - for (String tribeKey : tribeSettings.getAsMap().keySet()) { + for (String tribeKey : tribeSettings.keySet()) { if (tribeKey.startsWith("path.")) { throw new IllegalArgumentException("Setting [" + tribeKey + "] not allowed in tribe client [" + tribeName + "]"); } diff --git a/plugins/analysis-icu/licenses/lucene-NOTICE.txt b/plugins/analysis-icu/licenses/lucene-NOTICE.txt index ecf08201a5ee6..1a1d51572432a 100644 --- a/plugins/analysis-icu/licenses/lucene-NOTICE.txt +++ b/plugins/analysis-icu/licenses/lucene-NOTICE.txt @@ -54,13 +54,14 @@ The KStem stemmer in was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst) under the BSD-license. -The Arabic,Persian,Romanian,Bulgarian, and Hindi analyzers (common) come with a default +The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default stopword list that is BSD-licensed created by Jacques Savoy. These files reside in: analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt, -analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt +analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt See http://members.unine.ch/jacques.savoy/clef/index.html. The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.0.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.0.0.jar.sha1 deleted file mode 100644 index 9441a04d2865d..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -37868189327b684f6df10cc29bde626d5de4cd9e \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..ba04076220499 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +0677be4595aecb88e8052e309dfd2e5909107a24 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-NOTICE.txt b/plugins/analysis-kuromoji/licenses/lucene-NOTICE.txt index ecf08201a5ee6..1a1d51572432a 100644 --- a/plugins/analysis-kuromoji/licenses/lucene-NOTICE.txt +++ b/plugins/analysis-kuromoji/licenses/lucene-NOTICE.txt @@ -54,13 +54,14 @@ The KStem stemmer in was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst) under the BSD-license. -The Arabic,Persian,Romanian,Bulgarian, and Hindi analyzers (common) come with a default +The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default stopword list that is BSD-licensed created by Jacques Savoy. 
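The tribe changes above follow the same migration applied elsewhere in this change set (see also HdfsRepository and the CLI test below): iterate `Settings#keySet()` and look values up with `get(key)`, instead of materialising an intermediate map via `getAsMap()`. A minimal sketch of the new idiom (the helper class is illustrative):

    import org.elasticsearch.common.settings.Settings;

    final class SettingsIterationSketch {
        // dump every key/value pair using the keySet()/get(key) style this PR migrates to
        static void dump(final Settings settings) {
            for (final String key : settings.keySet()) {
                System.out.println(key + " = " + settings.get(key));
            }
        }
    }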
These files reside in: analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt, -analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt +analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt See http://members.unine.ch/jacques.savoy/clef/index.html. The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.0.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.0.0.jar.sha1 deleted file mode 100644 index e731e2a591f88..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c0bdeeaa5228b20f128082da209012131521398c \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..979b8fc979573 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +8fdf628e764891f3753d6aa6a4bd81a60543b249 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-NOTICE.txt b/plugins/analysis-phonetic/licenses/lucene-NOTICE.txt index ecf08201a5ee6..1a1d51572432a 100644 --- a/plugins/analysis-phonetic/licenses/lucene-NOTICE.txt +++ b/plugins/analysis-phonetic/licenses/lucene-NOTICE.txt @@ -54,13 +54,14 @@ The KStem stemmer in was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst) under the BSD-license. -The Arabic,Persian,Romanian,Bulgarian, and Hindi analyzers (common) come with a default +The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default stopword list that is BSD-licensed created by Jacques Savoy. These files reside in: analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt, -analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt +analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt See http://members.unine.ch/jacques.savoy/clef/index.html. 
The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.0.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.0.0.jar.sha1 deleted file mode 100644 index 9f7f8802f683f..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -57b98e4d0565dacfa8e4aa998fd2577ee00c5748 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..8d33e380b0691 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +1c23328881752f6590a463cb2a06acb1d952214d \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-NOTICE.txt b/plugins/analysis-smartcn/licenses/lucene-NOTICE.txt index ecf08201a5ee6..1a1d51572432a 100644 --- a/plugins/analysis-smartcn/licenses/lucene-NOTICE.txt +++ b/plugins/analysis-smartcn/licenses/lucene-NOTICE.txt @@ -54,13 +54,14 @@ The KStem stemmer in was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst) under the BSD-license. -The Arabic,Persian,Romanian,Bulgarian, and Hindi analyzers (common) come with a default +The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default stopword list that is BSD-licensed created by Jacques Savoy. These files reside in: analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt, -analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt +analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt See http://members.unine.ch/jacques.savoy/clef/index.html. 
The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.0.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.0.0.jar.sha1 deleted file mode 100644 index e444a6b154059..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f5a4d545012f8feb66358e39f44d2f59d708ff95 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..fca544111a52e --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +1e5db24f38cb9fff03942123dcbdf4e46fc3760e \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-NOTICE.txt b/plugins/analysis-stempel/licenses/lucene-NOTICE.txt index ecf08201a5ee6..1a1d51572432a 100644 --- a/plugins/analysis-stempel/licenses/lucene-NOTICE.txt +++ b/plugins/analysis-stempel/licenses/lucene-NOTICE.txt @@ -54,13 +54,14 @@ The KStem stemmer in was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst) under the BSD-license. -The Arabic,Persian,Romanian,Bulgarian, and Hindi analyzers (common) come with a default +The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default stopword list that is BSD-licensed created by Jacques Savoy. These files reside in: analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt, -analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt +analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt See http://members.unine.ch/jacques.savoy/clef/index.html. 
The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.0.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.0.0.jar.sha1 deleted file mode 100644 index 1901dadb307bf..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -50c9bf11eae81b3ed3de91300714070d51301db0 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..3d558870f95d8 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +2f3b84a76adf611b942c3e1ebce35686ce0e077d \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-NOTICE.txt b/plugins/analysis-ukrainian/licenses/lucene-NOTICE.txt index ecf08201a5ee6..1a1d51572432a 100644 --- a/plugins/analysis-ukrainian/licenses/lucene-NOTICE.txt +++ b/plugins/analysis-ukrainian/licenses/lucene-NOTICE.txt @@ -54,13 +54,14 @@ The KStem stemmer in was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst) under the BSD-license. -The Arabic,Persian,Romanian,Bulgarian, and Hindi analyzers (common) come with a default +The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default stopword list that is BSD-licensed created by Jacques Savoy. These files reside in: analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt, analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt, -analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt +analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt See http://members.unine.ch/jacques.savoy/clef/index.html. 
The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.0.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.0.0.jar.sha1 deleted file mode 100644 index e6d9c4b7ad830..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9ed6e0fde67713e7002cc481d6c398e6ce3e536f \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.1.0-snapshot-f33ed4ba12a.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.1.0-snapshot-f33ed4ba12a.jar.sha1 new file mode 100644 index 0000000000000..f6f61a2f32b64 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.1.0-snapshot-f33ed4ba12a.jar.sha1 @@ -0,0 +1 @@ +c33e9c9a609ac7e6cb5d3695b42d4fddded47ae0 \ No newline at end of file diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java index 58e34136f2e72..880be6c037323 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java @@ -115,8 +115,8 @@ class HostType { * instances with a tag key set to stage, and a value of dev. Several tags set will require all of those tags to be set for the * instance to be included. */ - Setting.AffixSetting<String> TAG_SETTING = Setting.prefixKeySetting("discovery.ec2.tag.", - key -> Setting.simpleString(key, Property.NodeScope)); + Setting.AffixSetting<List<String>> TAG_SETTING = Setting.prefixKeySetting("discovery.ec2.tag.", + key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope)); AmazonEC2 client(); } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index 117490593d36e..f291413d408ed 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -65,7 +65,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider private final Set<String> groups; - private final Map<String, String> tags; + private final Map<String, List<String>> tags; private final Set<String> availabilityZones; @@ -206,7 +206,7 @@ private DescribeInstancesRequest buildDescribeInstancesRequest() { new Filter("instance-state-name").withValues("running", "pending") ); - for (Map.Entry<String, String> tagFilter : tags.entrySet()) { + for (Map.Entry<String, List<String>> tagFilter : tags.entrySet()) { // for a given tag key, OR relationship for multiple different values describeInstancesRequest.withFilters( new Filter("tag:" + tagFilter.getKey()).withValues(tagFilter.getValue()) diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java index aaaceb67d468e..8f7671697db56 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java @@ -38,26 +38,24  
@@ public class AzureBlobContainer extends AbstractBlobContainer { - protected final Logger logger = Loggers.getLogger(AzureBlobContainer.class); - protected final AzureBlobStore blobStore; + private final Logger logger = Loggers.getLogger(AzureBlobContainer.class); + private final AzureBlobStore blobStore; - protected final String keyPath; - protected final String repositoryName; + private final String keyPath; - public AzureBlobContainer(String repositoryName, BlobPath path, AzureBlobStore blobStore) { + public AzureBlobContainer(BlobPath path, AzureBlobStore blobStore) { super(path); this.blobStore = blobStore; this.keyPath = path.buildAsString(); - this.repositoryName = repositoryName; } @Override public boolean blobExists(String blobName) { logger.trace("blobExists({})", blobName); try { - return blobStore.blobExists(blobStore.container(), buildKey(blobName)); + return blobStore.blobExists(buildKey(blobName)); } catch (URISyntaxException | StorageException e) { - logger.warn("can not access [{}] in container {{}}: {}", blobName, blobStore.container(), e.getMessage()); + logger.warn("can not access [{}] in container {{}}: {}", blobName, blobStore, e.getMessage()); } return false; } @@ -77,7 +75,7 @@ public InputStream readBlob(String blobName) throws IOException { } try { - return blobStore.getInputStream(blobStore.container(), buildKey(blobName)); + return blobStore.getInputStream(buildKey(blobName)); } catch (StorageException e) { if (e.getHttpStatusCode() == HttpURLConnection.HTTP_NOT_FOUND) { throw new NoSuchFileException(e.getMessage()); @@ -110,9 +108,9 @@ public void deleteBlob(String blobName) throws IOException { } try { - blobStore.deleteBlob(blobStore.container(), buildKey(blobName)); + blobStore.deleteBlob(buildKey(blobName)); } catch (URISyntaxException | StorageException e) { - logger.warn("can not access [{}] in container {{}}: {}", blobName, blobStore.container(), e.getMessage()); + logger.warn("can not access [{}] in container {{}}: {}", blobName, blobStore, e.getMessage()); throw new IOException(e); } } @@ -122,9 +120,9 @@ public Map listBlobsByPrefix(@Nullable String prefix) thro logger.trace("listBlobsByPrefix({})", prefix); try { - return blobStore.listBlobsByPrefix(blobStore.container(), keyPath, prefix); + return blobStore.listBlobsByPrefix(keyPath, prefix); } catch (URISyntaxException | StorageException e) { - logger.warn("can not access [{}] in container {{}}: {}", prefix, blobStore.container(), e.getMessage()); + logger.warn("can not access [{}] in container {{}}: {}", prefix, blobStore, e.getMessage()); throw new IOException(e); } } @@ -136,12 +134,11 @@ public void move(String sourceBlobName, String targetBlobName) throws IOExceptio String source = keyPath + sourceBlobName; String target = keyPath + targetBlobName; - logger.debug("moving blob [{}] to [{}] in container {{}}", source, target, blobStore.container()); + logger.debug("moving blob [{}] to [{}] in container {{}}", source, target, blobStore); - blobStore.moveBlob(blobStore.container(), source, target); + blobStore.moveBlob(source, target); } catch (URISyntaxException | StorageException e) { - logger.warn("can not move blob [{}] to [{}] in container {{}}: {}", sourceBlobName, targetBlobName, blobStore.container(), - e.getMessage()); + logger.warn("can not move blob [{}] to [{}] in container {{}}: {}", sourceBlobName, targetBlobName, blobStore, e.getMessage()); throw new IOException(e); } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java 
b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java index e23abe4aeb310..7e8987ae94576 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java @@ -45,14 +45,12 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore { private final String clientName; private final LocationMode locMode; private final String container; - private final String repositoryName; public AzureBlobStore(RepositoryMetaData metadata, Settings settings, AzureStorageService client) throws URISyntaxException, StorageException { super(settings); this.client = client; this.container = Repository.CONTAINER_SETTING.get(metadata.settings()); - this.repositoryName = metadata.name(); this.clientName = Repository.CLIENT_NAME.get(metadata.settings()); String modeStr = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); @@ -68,10 +66,6 @@ public String toString() { return container; } - public String container() { - return container; - } - /** * Gets the configured {@link LocationMode} for the Azure storage requests. */ @@ -81,7 +75,7 @@ public LocationMode getLocationMode() { @Override public BlobContainer blobContainer(BlobPath path) { - return new AzureBlobContainer(repositoryName, path, this); + return new AzureBlobContainer(path, this); } @Override @@ -98,37 +92,32 @@ public void delete(BlobPath path) { public void close() { } - public boolean doesContainerExist(String container) + public boolean doesContainerExist() { return this.client.doesContainerExist(this.clientName, this.locMode, container); } - public void deleteFiles(String container, String path) throws URISyntaxException, StorageException - { - this.client.deleteFiles(this.clientName, this.locMode, container, path); - } - - public boolean blobExists(String container, String blob) throws URISyntaxException, StorageException + public boolean blobExists(String blob) throws URISyntaxException, StorageException { return this.client.blobExists(this.clientName, this.locMode, container, blob); } - public void deleteBlob(String container, String blob) throws URISyntaxException, StorageException + public void deleteBlob(String blob) throws URISyntaxException, StorageException { this.client.deleteBlob(this.clientName, this.locMode, container, blob); } - public InputStream getInputStream(String container, String blob) throws URISyntaxException, StorageException, IOException + public InputStream getInputStream(String blob) throws URISyntaxException, StorageException, IOException { return this.client.getInputStream(this.clientName, this.locMode, container, blob); } - public Map<String, BlobMetaData> listBlobsByPrefix(String container, String keyPath, String prefix) + public Map<String, BlobMetaData> listBlobsByPrefix(String keyPath, String prefix) throws URISyntaxException, StorageException { return this.client.listBlobsByPrefix(this.clientName, this.locMode, container, keyPath, prefix); } - public void moveBlob(String container, String sourceBlob, String targetBlob) throws URISyntaxException, StorageException + public void moveBlob(String sourceBlob, String targetBlob) throws URISyntaxException, StorageException { this.client.moveBlob(this.clientName, this.locMode, container, sourceBlob, targetBlob); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java 
b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 8069400f02d47..06bf10fb2e292 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -153,8 +153,8 @@ protected ByteSizeValue chunkSize() { @Override public void initializeSnapshot(SnapshotId snapshotId, List<IndexId> indices, MetaData clusterMetadata) { - if (blobStore.doesContainerExist(blobStore.container()) == false) { - throw new IllegalArgumentException("The bucket [" + blobStore.container() + "] does not exist. Please create it before " + + if (blobStore.doesContainerExist() == false) { + throw new IllegalArgumentException("The bucket [" + blobStore + "] does not exist. Please create it before " + " creating an azure snapshot repository backed by it."); } super.initializeSnapshot(snapshotId, indices, clusterMetadata); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 66975e4dcc6d7..aa57cc1128f08 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -120,9 +120,9 @@ private FileContext createContext(URI uri, Settings repositorySettings) { hadoopConfiguration.setClassLoader(HdfsRepository.class.getClassLoader()); hadoopConfiguration.reloadConfiguration(); - Map<String, String> map = repositorySettings.getByPrefix("conf.").getAsMap(); - for (Entry<String, String> entry : map.entrySet()) { - hadoopConfiguration.set(entry.getKey(), entry.getValue()); + final Settings confSettings = repositorySettings.getByPrefix("conf."); + for (String key : confSettings.keySet()) { + hadoopConfiguration.set(key, confSettings.get(key)); } // Create a hadoop user diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java index 02caeca808944..2f74a5180e195 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.settings.Settings; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.hasEntry; @@ -41,10 +42,10 @@ public void testPathHome() throws Exception { true, output -> {}, (foreground, pidFile, quiet, esSettings) -> { - Map<String, String> settings = esSettings.settings().getAsMap(); + Settings settings = esSettings.settings(); assertThat(settings.size(), equalTo(2)); - assertThat(settings, hasEntry("path.home", value)); - assertThat(settings, hasKey("path.logs")); // added by env initialization + assertEquals(value, settings.get("path.home")); + assertTrue(settings.keySet().contains("path.logs")); // added by env initialization }); System.clearProperty("es.path.home"); @@ -54,10 +55,10 @@ public void testPathHome() throws Exception { true, output -> {}, (foreground, pidFile, quiet, esSettings) -> { - Map<String, String> settings = esSettings.settings().getAsMap(); + Settings settings = esSettings.settings(); assertThat(settings.size(), equalTo(2)); - assertThat(settings, 
hasEntry("path.home", commandLineValue)); - assertThat(settings, hasKey("path.logs")); // added by env initialization + assertEquals(commandLineValue, settings.get("path.home")); + assertTrue(settings.keySet().contains("path.logs")); // added by env initialization }, "-Epath.home=" + commandLineValue); diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle index 42b6f40a3cd0a..95e62416cade3 100644 --- a/qa/full-cluster-restart/build.gradle +++ b/qa/full-cluster-restart/build.gradle @@ -48,6 +48,10 @@ for (Version version : indexCompatVersions) { numBwcNodes = 2 numNodes = 2 clusterName = 'full-cluster-restart' + + // some tests rely on the translog not being flushed + setting 'indices.memory.shard_inactive_time', '20m' + if (version.onOrAfter('5.3.0')) { setting 'http.content_type.required', 'true' } @@ -64,6 +68,10 @@ for (Version version : indexCompatVersions) { "${baseName}#oldClusterTestCluster#node1.stop" distribution = 'zip' clusterName = 'full-cluster-restart' + + // some tests rely on the translog not being flushed + setting 'indices.memory.shard_inactive_time', '20m' + numNodes = 2 dataDir = { nodeNum -> oldClusterTest.nodes[nodeNum].dataDir } cleanShared = false // We want to keep snapshots made by the old cluster! diff --git a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java index b5076868f04f0..b3ef9cb7dd02b 100644 --- a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java +++ b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java @@ -42,27 +42,10 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; -import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; public class IndexingIT extends ESRestTestCase { - private void assertOK(Response response) { - assertThat(response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(201))); - } - - private void ensureGreen() throws IOException { - Map params = new HashMap<>(); - params.put("wait_for_status", "green"); - params.put("wait_for_no_relocating_shards", "true"); - assertOK(client().performRequest("GET", "_cluster/health", params)); - } - - private void createIndex(String name, Settings settings) throws IOException { - assertOK(client().performRequest("PUT", name, Collections.emptyMap(), - new StringEntity("{ \"settings\": " + Strings.toString(settings) + " }", ContentType.APPLICATION_JSON))); - } - private void updateIndexSetting(String name, Settings.Builder settings) throws IOException { updateIndexSetting(name, settings.build()); } diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml index e6b0e9d13c0fc..7726a1df0b10d 100644 --- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml +++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml @@ -165,3 +165,14 @@ - match: { hits.total: 2 } - match: { hits.hits.0._source.filter_field: 1 } - match: { hits.hits.0._index: "my_remote_cluster:test_index" } + +--- +"Single shard search gets properly proxied": + + - do: + search: + index: "my_remote_cluster:single_doc_index" + + - match: { _shards.total: 1 } + - match: { hits.total: 
1 } + - match: { hits.hits.0._index: "my_remote_cluster:single_doc_index"} diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index 0956a9f053ba9..6c21d3e37ef30 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -18,23 +18,16 @@ */ package org.elasticsearch.upgrades; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Response; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.ObjectPath; -import java.io.IOException; import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING; -import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; @@ -83,24 +76,6 @@ protected Settings restClientSettings() { .build(); } - private void assertOK(Response response) { - assertThat(response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(201))); - } - - private void ensureGreen() throws IOException { - Map params = new HashMap<>(); - params.put("wait_for_status", "green"); - params.put("wait_for_no_relocating_shards", "true"); - params.put("timeout", "70s"); - params.put("level", "shards"); - assertOK(client().performRequest("GET", "_cluster/health", params)); - } - - private void createIndex(String name, Settings settings) throws IOException { - assertOK(client().performRequest("PUT", name, Collections.emptyMap(), - new StringEntity("{ \"settings\": " + Strings.toString(settings) + " }", ContentType.APPLICATION_JSON))); - } - public void testHistoryUUIDIsGenerated() throws Exception { final String index = "index_history_uuid"; if (clusterType == CLUSTER_TYPE.OLD) { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml index c6ba03a9aeb8d..6bc9f0084b704 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml @@ -65,7 +65,8 @@ - skip: version: " - 5.4.99" reason: confusing exception messaged caused by empty object fixed in 5.5.0 - + features: ["headers"] + - do: catch: /Malformed action\/metadata line \[3\], expected FIELD_NAME but found \[END_OBJECT\]/ headers: diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index ce0e58558dd42..ff67f874fda0d 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -112,6 +112,8 @@ private static String toCamelCase(String s) { .put("arabicnormalization", MovedToAnalysisCommon.class) .put("arabicstem", MovedToAnalysisCommon.class) .put("asciifolding", MovedToAnalysisCommon.class) + .put("bengalinormalization", 
MovedToAnalysisCommon.class) + .put("bengalistem", MovedToAnalysisCommon.class) .put("brazilianstem", MovedToAnalysisCommon.class) .put("bulgarianstem", MovedToAnalysisCommon.class) .put("cjkbigram", MovedToAnalysisCommon.class) @@ -191,7 +193,6 @@ private static String toCamelCase(String s) { .put("flattengraph", MovedToAnalysisCommon.class) // TODO: these tokenfilters are not yet exposed: useful? - // suggest stop .put("suggeststop", Void.class) // capitalizes tokens diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index aa004512ac751..b0b0ffc9df8ad 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -416,7 +416,7 @@ public void randomIndexTemplate() throws IOException { randomSettingsBuilder.put("index.codec", CodecService.LUCENE_DEFAULT_CODEC); } - for (String setting : randomSettingsBuilder.internalMap().keySet()) { + for (String setting : randomSettingsBuilder.keys()) { assertThat("non index. prefix setting set on index template, it's a node setting...", setting, startsWith("index.")); } // always default delayed allocation to 0 to make sure tests are not delayed @@ -548,15 +548,15 @@ protected final void afterInternal(boolean afterClass) throws Exception { if (cluster() != null) { if (currentClusterScope != Scope.TEST) { MetaData metaData = client().admin().cluster().prepareState().execute().actionGet().getState().getMetaData(); - final Map<String, String> persistent = metaData.persistentSettings().getAsMap(); + final Set<String> persistent = metaData.persistentSettings().keySet(); assertThat("test leaves persistent cluster metadata behind: " + persistent, persistent.size(), equalTo(0)); - final Map<String, String> transientSettings = new HashMap<>(metaData.transientSettings().getAsMap()); + final Set<String> transientSettings = new HashSet<>(metaData.transientSettings().keySet()); if (isInternalCluster() && internalCluster().getAutoManageMinMasterNode()) { // this is set by the test infra transientSettings.remove(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()); } assertThat("test leaves transient cluster metadata behind: " + transientSettings, - transientSettings.keySet(), empty()); + transientSettings, empty()); } ensureClusterSizeConsistency(); ensureClusterStateConsistency(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index ca296c9d1ae13..0363a938dd18f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -112,9 +112,9 @@ public void tearDown() throws Exception { super.tearDown(); assertAcked(client().admin().indices().prepareDelete("*").get()); MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - assertThat("test leaves persistent cluster metadata behind: " + metaData.persistentSettings().getAsMap(), + assertThat("test leaves persistent cluster metadata behind: " + metaData.persistentSettings().keySet(), metaData.persistentSettings().size(), equalTo(0)); - assertThat("test leaves transient cluster metadata behind: " + metaData.transientSettings().getAsMap(), + assertThat("test leaves transient cluster metadata behind: " + metaData.transientSettings().keySet(), metaData.transientSettings().size(), equalTo(0)); if (resetNodeAfterTest()) { assert NODE != null;
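
All of these test changes are the same underlying migration: Settings no longer hands out its contents as a Map via getAsMap(), so callers iterate keySet() and look each value up lazily. A small before/after sketch; only the Settings methods that appear in this diff (keySet(), get(key)) are assumed:

import org.elasticsearch.common.settings.Settings;

class SettingsIterationSketch {
    // old style (removed in this diff):
    //   for (Map.Entry<String, String> e : settings.getAsMap().entrySet()) { use(e.getKey(), e.getValue()); }
    static void forEachSetting(Settings settings) {
        for (String key : settings.keySet()) {
            use(key, settings.get(key)); // values fetched per key instead of copied into a map
        }
    }

    private static void use(String key, String value) {
        System.out.println(key + " = " + value);
    }
}
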
diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index 42977105058c1..b35dc9563ce5c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -26,9 +26,11 @@ import java.util.ArrayList; import java.util.List; +import java.util.regex.Pattern; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertTrue; /** * Test appender that can be used to verify that certain events were logged correctly @@ -122,6 +124,37 @@ public void assertMatched() { } } + public static class PatternSeenEventExpectation implements LoggingExpectation { + + protected final String name; + protected final String logger; + protected final Level level; + protected final String pattern; + volatile boolean saw; + + public PatternSeenEventExpectation(String name, String logger, Level level, String pattern) { + this.name = name; + this.logger = logger; + this.level = level; + this.pattern = pattern; + } + + @Override + public void match(LogEvent event) { + if (event.getLevel().equals(level) && event.getLoggerName().equals(logger)) { + if (Pattern.matches(pattern, event.getMessage().getFormattedMessage())) { + saw = true; + } + } + } + + @Override + public void assertMatched() { + assertThat(name, saw, equalTo(true)); + } + + } + private static String getLoggerName(String name) { if (name.startsWith("org.elasticsearch.")) { name = name.substring("org.elasticsearch.".length());
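
A sketch of how the new pattern expectation is meant to be used alongside the existing MockLogAppender plumbing; the logger name, level (org.apache.logging.log4j.Level), and regex are invented for illustration, and the appender still has to be started and attached to the target logger the way existing tests do:

MockLogAppender appender = new MockLogAppender();
appender.addExpectation(new MockLogAppender.PatternSeenEventExpectation(
        "failure is logged",             // name echoed in the assertion message
        "org.elasticsearch.example",     // logger that must emit the event
        Level.WARN,                      // level the event must carry
        "failed to recover \\[\\d+\\] shards")); // regex matched against the formatted message
// ... exercise the code under test ...
appender.assertAllExpectationsMatched();
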
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 97b58ceda72f4..cef820ac0096a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -21,6 +21,8 @@ import org.apache.http.Header; import org.apache.http.HttpHost; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.ssl.SSLContexts; @@ -30,6 +32,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -41,6 +44,7 @@ import org.junit.AfterClass; import org.junit.Before; +import javax.net.ssl.SSLContext; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; @@ -51,16 +55,19 @@ import java.security.NoSuchAlgorithmException; import java.security.cert.CertificateException; import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; - -import javax.net.ssl.SSLContext; +import java.util.concurrent.TimeUnit; import static java.util.Collections.singletonMap; import static java.util.Collections.sort; import static java.util.Collections.unmodifiableList; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; /** * Superclass for tests that interact with an external test cluster using Elasticsearch's {@link RestClient}. @@ -287,7 +294,7 @@ private void waitForClusterStateUpdatesToFinish() throws Exception { } catch (IOException e) { fail("cannot get cluster's pending tasks: " + e.getMessage()); } - }); + }, 30, TimeUnit.SECONDS); } /** @@ -380,4 +387,28 @@ private Set<String> runningTasks(Response response) throws IOException { } return runningTasks; } + + protected void assertOK(Response response) { + assertThat(response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(201))); + } + + protected void ensureGreen() throws IOException { + Map<String, String> params = new HashMap<>(); + params.put("wait_for_status", "green"); + params.put("wait_for_no_relocating_shards", "true"); + params.put("timeout", "70s"); + params.put("level", "shards"); + assertOK(client().performRequest("GET", "_cluster/health", params)); + } + + protected void createIndex(String name, Settings settings) throws IOException { + createIndex(name, settings, ""); + } + + protected void createIndex(String name, Settings settings, String mapping) throws IOException { + assertOK(client().performRequest("PUT", name, Collections.emptyMap(), + new StringEntity("{ \"settings\": " + Strings.toString(settings) + + ", \"mappings\" : {" + mapping + "} }", ContentType.APPLICATION_JSON))); + } + }
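
assertOK, ensureGreen, and createIndex were previously private copies inside IndexingIT and RecoveryIT (both deletions appear earlier in this diff); promoting them to ESRestTestCase lets any REST test reuse them. A sketch of a subclass using the shared helpers, with an invented test and index name:

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.rest.ESRestTestCase;

public class ExampleRestIT extends ESRestTestCase {
    public void testIndexComesUpGreen() throws Exception {
        // PUT example-index with the settings (and optionally mappings) serialized to JSON
        createIndex("example-index", Settings.builder()
                .put("index.number_of_shards", 1)
                .put("index.number_of_replicas", 0)
                .build());
        // waits for green health with no relocating shards, 70s timeout, shard-level detail
        ensureGreen();
    }
}
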
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index 56e017fc1f9c0..29ff4219feecb 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -251,7 +251,9 @@ protected void closeChannels(List channels, boolean blocking, boole * side otherwise the client (node) initiates the TCP closing sequence which doesn't cause these issues. Setting this * by default from the beginning can have unexpected side-effects and should be avoided, our protocol is designed * in a way that clients close connection which is how it should be*/ - channel.activeChannel.setSoLinger(true, 0); + if (channel.isOpen.get()) { + channel.activeChannel.setSoLinger(true, 0); + } } } }
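
The guard matters because SO_LINGER can only be set while the socket is still open; if the remote end has already closed the channel, the old code threw from inside closeChannels. The same fix is applied to NioTransport further down. The pattern in isolation, sketched over a plain SocketChannel rather than the transport's wrapper types:

import java.io.IOException;
import java.net.StandardSocketOptions;
import java.nio.channels.SocketChannel;

final class LingerOnCloseSketch {
    // SO_LINGER = 0 makes close() send an RST rather than a FIN, which keeps
    // short-lived test sockets out of TIME_WAIT. Guard it: the option can no
    // longer be set once the channel has been closed by the other side.
    static void prepareAbortiveClose(SocketChannel channel) throws IOException {
        if (channel.isOpen()) {
            channel.setOption(StandardSocketOptions.SO_LINGER, 0);
        }
    }
}
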
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/AcceptingSelector.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/AcceptingSelector.java index f43a061500526..e116d6421706d 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/AcceptingSelector.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/AcceptingSelector.java @@ -51,21 +51,22 @@ public AcceptingSelector(AcceptorEventHandler eventHandler, Selector selector) t } @Override - void doSelect(int timeout) throws IOException, ClosedSelectorException { - setUpNewServerChannels(); - - int ready = selector.select(timeout); - if (ready > 0) { - Set<SelectionKey> selectionKeys = selector.selectedKeys(); - Iterator<SelectionKey> keyIterator = selectionKeys.iterator(); - while (keyIterator.hasNext()) { - SelectionKey sk = keyIterator.next(); - keyIterator.remove(); - acceptChannel(sk); + void processKey(SelectionKey selectionKey) { + NioServerSocketChannel serverChannel = (NioServerSocketChannel) selectionKey.attachment(); + if (selectionKey.isAcceptable()) { + try { + eventHandler.acceptChannel(serverChannel); + } catch (IOException e) { + eventHandler.acceptException(serverChannel, e); } } } + @Override + void preSelect() { + setUpNewServerChannels(); + } + @Override void cleanup() { channelsToClose.addAll(newChannels); @@ -74,6 +75,7 @@ void cleanup() { /** * Schedules a NioServerSocketChannel to be registered with this selector. The channel will be queued and * eventually registered next time through the event loop. + * * @param serverSocketChannel the channel to register */ public void scheduleForRegistration(NioServerSocketChannel serverSocketChannel) { @@ -82,7 +84,7 @@ public void scheduleForRegistration(NioServerSocketChannel serverSocketChannel) wakeup(); } - private void setUpNewServerChannels() throws ClosedChannelException { + private void setUpNewServerChannels() { NioServerSocketChannel newChannel; while ((newChannel = this.newChannels.poll()) != null) { assert newChannel.getSelector() == this : "The channel must be registered with the selector with which it was created"; @@ -101,23 +103,4 @@ private void setUpNewServerChannels() { } } } - - private void acceptChannel(SelectionKey sk) { - NioServerSocketChannel serverChannel = (NioServerSocketChannel) sk.attachment(); - if (sk.isValid()) { - try { - if (sk.isAcceptable()) { - try { - eventHandler.acceptChannel(serverChannel); - } catch (IOException e) { - eventHandler.acceptException(serverChannel, e); - } - } - } catch (CancelledKeyException ex) { - eventHandler.genericServerChannelException(serverChannel, ex); - } - } else { - eventHandler.genericServerChannelException(serverChannel, new CancelledKeyException()); - } - } } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/AcceptorEventHandler.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/AcceptorEventHandler.java index 0c360e11a4c05..3de846fd61f6b 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/AcceptorEventHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/AcceptorEventHandler.java @@ -87,16 +87,4 @@ void acceptException(NioServerSocketChannel nioServerChannel, Exception exceptio logger.debug(() -> new ParameterizedMessage("exception while accepting new channel from server channel: {}", nioServerChannel), exception); } - - /** - * This method is called when handling an event from a channel fails due to an unexpected exception. - * An example would be if checking ready ops on a {@link java.nio.channels.SelectionKey} threw - * {@link java.nio.channels.CancelledKeyException}. - * - * @param channel that caused the exception - * @param exception that was thrown - */ - void genericServerChannelException(NioServerSocketChannel channel, Exception exception) { - logger.debug(() -> new ParameterizedMessage("exception while handling event for server channel: {}", channel), exception); - } } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/ESSelector.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/ESSelector.java index 9030d5781458e..ba0fae3ee3127 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/ESSelector.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/ESSelector.java @@ -24,9 +24,12 @@ import java.io.Closeable; import java.io.IOException; +import java.nio.channels.CancelledKeyException; import java.nio.channels.ClosedSelectorException; +import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.util.Collections; +import java.util.Iterator; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; @@ -40,8 +43,8 @@ * {@link #close()} is called. This instance handles closing of channels. Users should call * {@link #queueChannelClose(NioChannel)} to schedule a channel for close by this selector. *

- * Children of this class should implement the specific {@link #doSelect(int)} and {@link #cleanup()} - * functionality. + * Children of this class should implement the specific {@link #processKey(SelectionKey)}, + * {@link #preSelect()}, and {@link #cleanup()} functionality. */ public abstract class ESSelector implements Closeable { @@ -98,7 +101,26 @@ public void runLoop() { void singleLoop() { try { closePendingChannels(); - doSelect(300); + preSelect(); + + int ready = selector.select(300); + if (ready > 0) { + Set<SelectionKey> selectionKeys = selector.selectedKeys(); + Iterator<SelectionKey> keyIterator = selectionKeys.iterator(); + while (keyIterator.hasNext()) { + SelectionKey sk = keyIterator.next(); + keyIterator.remove(); + if (sk.isValid()) { + try { + processKey(sk); + } catch (CancelledKeyException cke) { + eventHandler.genericChannelException((NioChannel) sk.attachment(), cke); + } + } else { + eventHandler.genericChannelException((NioChannel) sk.attachment(), new CancelledKeyException()); + } + } + } } catch (ClosedSelectorException e) { if (isOpen()) { throw e; @@ -117,13 +139,19 @@ void cleanupAndCloseChannels() { } /** - * Should implement the specific select logic. This will be called once per {@link #singleLoop()} + * Called by the base {@link ESSelector} class when there is a {@link SelectionKey} to be handled. * - * @param timeout to pass to the raw select operation - * @throws IOException thrown by the raw select operation - * @throws ClosedSelectorException thrown if the raw selector is closed + * @param selectionKey the key to be handled + * @throws CancelledKeyException thrown when the key has already been cancelled + */ + abstract void processKey(SelectionKey selectionKey) throws CancelledKeyException; + + /** + * Called immediately prior to a raw {@link Selector#select()} call. Should be used to implement + * channel registration, handling queued writes, and other work that is not specifically processing + * a selection key. */ - abstract void doSelect(int timeout) throws IOException, ClosedSelectorException; + abstract void preSelect(); /** * Called once as the selector is being closed.
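
With this change the select-iterate-dispatch skeleton lives once in ESSelector.singleLoop(), and the concrete selectors only fill in preSelect() and processKey(); invalid and cancelled keys are funneled to a single EventHandler callback instead of being handled in each subclass. SocketSelector's processKey (later in this diff) also shows the idiom of snapshotting readyOps() once and branching on the bits, which is safe because the base class already verified the key and wraps the call in a CancelledKeyException handler. A condensed sketch of that per-key dispatch (comments stand in for the real handler calls):

import java.nio.channels.SelectionKey;

abstract class KeyDispatchSketch {
    // Sketch of a processKey implementation under the new contract: the base
    // class has checked key.isValid() and catches CancelledKeyException.
    void processKey(SelectionKey key) {
        int ops = key.readyOps(); // snapshot the ready set once
        if ((ops & SelectionKey.OP_CONNECT) != 0) {
            // finish the pending non-blocking connect
        }
        if ((ops & SelectionKey.OP_WRITE) != 0) {
            // flush queued writes for the attached channel
        }
        if ((ops & SelectionKey.OP_READ) != 0) {
            // drain readable bytes for the attached channel
        }
    }
}
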
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/EventHandler.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/EventHandler.java index 30682406d8404..04e1b21b1b065 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/EventHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/EventHandler.java @@ -88,4 +88,16 @@ void handleClose(NioChannel channel) { void closeException(NioChannel channel, Exception exception) { logger.debug(() -> new ParameterizedMessage("exception while closing channel: {}", channel), exception); } + + /** + * This method is called when handling an event from a channel fails due to an unexpected exception. + * An example would be if checking ready ops on a {@link java.nio.channels.SelectionKey} threw + * {@link java.nio.channels.CancelledKeyException}. + * + * @param channel that caused the exception + * @param exception that was thrown + */ + void genericChannelException(NioChannel channel, Exception exception) { + logger.debug(() -> new ParameterizedMessage("exception while handling event for channel: {}", channel), exception); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/NioTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/NioTransport.java index 38ec361546e0a..606225fd02ad7 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/NioTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/NioTransport.java @@ -107,7 +107,9 @@ protected void closeChannels(List channels, boolean blocking, boolea * side otherwise the client (node) initiates the TCP closing sequence which doesn't cause these issues. Setting this * by default from the beginning can have unexpected side-effects and should be avoided, our protocol is designed * in a way that clients close connection which is how it should be*/ - channel.getRawChannel().setOption(StandardSocketOptions.SO_LINGER, 0); + if (channel.isOpen()) { + channel.getRawChannel().setOption(StandardSocketOptions.SO_LINGER, 0); + } } } ArrayList futures = new ArrayList<>(channels.size()); diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/SocketEventHandler.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/SocketEventHandler.java index a57307decbb83..58958a2b3ce3f 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/SocketEventHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/SocketEventHandler.java @@ -21,6 +21,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.transport.nio.channel.NioChannel; import org.elasticsearch.transport.nio.channel.NioSocketChannel; import org.elasticsearch.transport.nio.channel.SelectionKeyUtils; import org.elasticsearch.transport.nio.channel.WriteContext; @@ -82,7 +83,6 @@ void handleConnect(NioSocketChannel channel) { void connectException(NioSocketChannel channel, Exception exception) { logger.debug(() -> new ParameterizedMessage("failed to connect to socket channel: {}", channel), exception); exceptionCaught(channel, exception); - } /** @@ -144,9 +144,9 @@ void writeException(NioSocketChannel channel, Exception exception) { * @param channel that caused the exception * @param exception that was thrown */ - void genericChannelException(NioSocketChannel channel, Exception exception) { - logger.debug(() -> new ParameterizedMessage("exception while handling event for socket channel: {}", channel), exception); - exceptionCaught(channel, exception); + void genericChannelException(NioChannel channel, Exception exception) { + super.genericChannelException(channel, exception); + exceptionCaught((NioSocketChannel) channel, exception); } private void exceptionCaught(NioSocketChannel channel, Exception e) { diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/SocketSelector.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/SocketSelector.java index b4da075f0fcc9..9c90463421a81 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/SocketSelector.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/SocketSelector.java @@ -24,13 +24,10 @@ import org.elasticsearch.transport.nio.channel.WriteContext; import java.io.IOException; -import 
java.nio.channels.CancelledKeyException; import java.nio.channels.ClosedChannelException; import java.nio.channels.ClosedSelectorException; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; -import java.util.Iterator; -import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; /** @@ -54,17 +51,30 @@ public SocketSelector(SocketEventHandler eventHandler, Selector selector) throws } @Override - void doSelect(int timeout) throws IOException, ClosedSelectorException { - setUpNewChannels(); - handleQueuedWrites(); + void processKey(SelectionKey selectionKey) { + NioSocketChannel nioSocketChannel = (NioSocketChannel) selectionKey.attachment(); + int ops = selectionKey.readyOps(); + if ((ops & SelectionKey.OP_CONNECT) != 0) { + attemptConnect(nioSocketChannel, true); + } + + if (nioSocketChannel.isConnectComplete()) { + if ((ops & SelectionKey.OP_WRITE) != 0) { + handleWrite(nioSocketChannel); + } - int ready = selector.select(timeout); - if (ready > 0) { - Set<SelectionKey> selectionKeys = selector.selectedKeys(); - processKeys(selectionKeys); + if ((ops & SelectionKey.OP_READ) != 0) { + handleRead(nioSocketChannel); + } } } + @Override + void preSelect() { + setUpNewChannels(); + handleQueuedWrites(); + } + @Override void cleanup() { WriteOperation op; @@ -122,38 +132,6 @@ public void queueWriteInChannelBuffer(WriteOperation writeOperation) { } } - private void processKeys(Set<SelectionKey> selectionKeys) { - Iterator<SelectionKey> keyIterator = selectionKeys.iterator(); - while (keyIterator.hasNext()) { - SelectionKey sk = keyIterator.next(); - keyIterator.remove(); - NioSocketChannel nioSocketChannel = (NioSocketChannel) sk.attachment(); - if (sk.isValid()) { - try { - int ops = sk.readyOps(); - if ((ops & SelectionKey.OP_CONNECT) != 0) { - attemptConnect(nioSocketChannel, true); - } - - if (nioSocketChannel.isConnectComplete()) { - if ((ops & SelectionKey.OP_WRITE) != 0) { - handleWrite(nioSocketChannel); - } - - if ((ops & SelectionKey.OP_READ) != 0) { - handleRead(nioSocketChannel); - } - } - } catch (CancelledKeyException e) { - eventHandler.genericChannelException(nioSocketChannel, e); - } - } else { - eventHandler.genericChannelException(nioSocketChannel, new CancelledKeyException()); - } - } - } - - private void handleWrite(NioSocketChannel nioSocketChannel) { try { eventHandler.handleWrite(nioSocketChannel); diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index a45a54d27bfff..25c96da81fa16 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -116,15 +116,16 @@ public static void assertClusters(InternalTestCluster cluster0, InternalTestClus } public static void assertSettings(Settings left, Settings right, boolean checkClusterUniqueSettings) { - Set<Map.Entry<String, String>> entries0 = left.getAsMap().entrySet(); - Map<String, String> entries1 = right.getAsMap(); + Set<String> keys0 = left.keySet(); + Set<String> keys1 = right.keySet(); assertThat("--> left:\n" + left.toDelimitedString('\n') + "\n-->right:\n" + right.toDelimitedString('\n'), - entries0.size(), equalTo(entries1.size())); - for (Map.Entry<String, String> entry : entries0) { - if (clusterUniqueSettings.contains(entry.getKey()) && checkClusterUniqueSettings == false) {
continue; } - assertThat(entries1, hasEntry(entry.getKey(), entry.getValue())); + assertTrue("key [" + key + "] is missing in " + keys1, keys1.contains(key)); + assertEquals(right.get(key), left.get(key)); } } @@ -137,10 +138,9 @@ private void assertMMNinNodeSetting(InternalTestCluster cluster, int masterNodes private void assertMMNinNodeSetting(String node, InternalTestCluster cluster, int masterNodes) { final int minMasterNodes = masterNodes / 2 + 1; Settings nodeSettings = cluster.client(node).admin().cluster().prepareNodesInfo(node).get().getNodes().get(0).getSettings(); - assertThat("node setting of node [" + node + "] has the wrong min_master_node setting: [" + assertEquals("node setting of node [" + node + "] has the wrong min_master_node setting: [" + nodeSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) + "]", - nodeSettings.getAsMap(), - hasEntry(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.toString(minMasterNodes))); + DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.get(nodeSettings).intValue(), minMasterNodes); } private void assertMMNinClusterSetting(InternalTestCluster cluster, int masterNodes) { @@ -149,10 +149,9 @@ private void assertMMNinClusterSetting(InternalTestCluster cluster, int masterNo Settings stateSettings = cluster.client(node).admin().cluster().prepareState().setLocal(true) .get().getState().getMetaData().settings(); - assertThat("dynamic setting for node [" + node + "] has the wrong min_master_node setting : [" + assertEquals("dynamic setting for node [" + node + "] has the wrong min_master_node setting : [" + stateSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) + "]", - stateSettings.getAsMap(), - hasEntry(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.toString(minMasterNodes))); + DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.get(stateSettings).intValue(), minMasterNodes); } } diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/AcceptingSelectorTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/AcceptingSelectorTests.java index 05d3b292b0a8d..140c44133d3fd 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/nio/AcceptingSelectorTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/AcceptingSelectorTests.java @@ -46,7 +46,6 @@ public class AcceptingSelectorTests extends ESTestCase { private NioServerSocketChannel serverChannel; private AcceptorEventHandler eventHandler; private TestSelectionKey selectionKey; - private HashSet keySet = new HashSet<>(); @Before public void setUp() throws Exception { @@ -64,14 +63,12 @@ public void setUp() throws Exception { when(serverChannel.getSelectionKey()).thenReturn(selectionKey); when(serverChannel.getSelector()).thenReturn(selector); when(serverChannel.isOpen()).thenReturn(true); - when(rawSelector.selectedKeys()).thenReturn(keySet); - when(rawSelector.select(0)).thenReturn(1); } public void testRegisteredChannel() throws IOException, PrivilegedActionException { selector.scheduleForRegistration(serverChannel); - selector.doSelect(0); + selector.preSelect(); verify(eventHandler).serverChannelRegistered(serverChannel); Set registeredChannels = selector.getRegisteredChannels(); @@ -83,7 +80,7 @@ public void testClosedChannelWillNotBeRegistered() throws Exception { when(serverChannel.isOpen()).thenReturn(false); selector.scheduleForRegistration(serverChannel); - selector.doSelect(0); + selector.preSelect(); verify(eventHandler).registrationException(same(serverChannel), 
any(ClosedChannelException.class)); @@ -98,7 +95,7 @@ public void testRegisterChannelFailsDueToException() throws Exception { ClosedChannelException closedChannelException = new ClosedChannelException(); doThrow(closedChannelException).when(serverChannel).register(); - selector.doSelect(0); + selector.preSelect(); verify(eventHandler).registrationException(serverChannel, closedChannelException); @@ -109,21 +106,19 @@ public void testRegisterChannelFailsDueToException() throws Exception { public void testAcceptEvent() throws IOException { selectionKey.setReadyOps(SelectionKey.OP_ACCEPT); - keySet.add(selectionKey); - selector.doSelect(0); + selector.processKey(selectionKey); verify(eventHandler).acceptChannel(serverChannel); } public void testAcceptException() throws IOException { selectionKey.setReadyOps(SelectionKey.OP_ACCEPT); - keySet.add(selectionKey); IOException ioException = new IOException(); doThrow(ioException).when(eventHandler).acceptChannel(serverChannel); - selector.doSelect(0); + selector.processKey(selectionKey); verify(eventHandler).acceptException(serverChannel, ioException); } @@ -131,7 +126,7 @@ public void testAcceptException() throws IOException { public void testCleanup() throws IOException { selector.scheduleForRegistration(serverChannel); - selector.doSelect(0); + selector.preSelect(); assertEquals(1, selector.getRegisteredChannels().size()); diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/ESSelectorTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/ESSelectorTests.java index 53705fcf5216b..afcd42dcb528e 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/nio/ESSelectorTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/ESSelectorTests.java @@ -24,8 +24,12 @@ import org.junit.Before; import java.io.IOException; +import java.nio.channels.CancelledKeyException; import java.nio.channels.ClosedSelectorException; +import java.nio.channels.SelectionKey; +import java.nio.channels.Selector; +import static org.mockito.Matchers.anyInt; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -34,12 +38,14 @@ public class ESSelectorTests extends ESTestCase { private ESSelector selector; private EventHandler handler; + private Selector rawSelector; @Before public void setUp() throws Exception { super.setUp(); handler = mock(EventHandler.class); - selector = new TestSelector(handler); + rawSelector = mock(Selector.class); + selector = new TestSelector(handler, rawSelector); } public void testQueueChannelForClosed() throws IOException { @@ -61,9 +67,8 @@ public void testQueueChannelForClosed() throws IOException { } public void testSelectorClosedExceptionIsNotCaughtWhileRunning() throws IOException { - ((TestSelector) this.selector).setClosedSelectorException(new ClosedSelectorException()); - boolean closedSelectorExceptionCaught = false; + when(rawSelector.select(anyInt())).thenThrow(new ClosedSelectorException()); try { this.selector.singleLoop(); } catch (ClosedSelectorException e) { @@ -75,7 +80,8 @@ public void testSelectorClosedExceptionIsNotCaughtWhileRunning() throws IOExcept public void testIOExceptionWhileSelect() throws IOException { IOException ioException = new IOException(); - ((TestSelector) this.selector).setIOException(ioException); + + when(rawSelector.select(anyInt())).thenThrow(ioException); this.selector.singleLoop(); @@ -84,34 +90,23 @@ public void testIOExceptionWhileSelect() throws IOException { 
private static class TestSelector extends ESSelector { - private ClosedSelectorException closedSelectorException; - private IOException ioException; - - protected TestSelector(EventHandler eventHandler) throws IOException { - super(eventHandler); + TestSelector(EventHandler eventHandler, Selector selector) throws IOException { + super(eventHandler, selector); } @Override - void doSelect(int timeout) throws IOException, ClosedSelectorException { - if (closedSelectorException != null) { - throw closedSelectorException; - } - if (ioException != null) { - throw ioException; - } + void processKey(SelectionKey selectionKey) throws CancelledKeyException { + } @Override - void cleanup() { + void preSelect() { } - public void setClosedSelectorException(ClosedSelectorException exception) { - this.closedSelectorException = exception; - } + @Override + void cleanup() { - public void setIOException(IOException ioException) { - this.ioException = ioException; } } diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/SocketSelectorTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/SocketSelectorTests.java index 50ce4a55b2960..cb266831530c8 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/nio/SocketSelectorTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/SocketSelectorTests.java @@ -53,7 +53,6 @@ public class SocketSelectorTests extends ESTestCase { private NioSocketChannel channel; private TestSelectionKey selectionKey; private WriteContext writeContext; - private HashSet keySet = new HashSet<>(); private ActionListener listener; private NetworkBytesReference bufferReference = NetworkBytesReference.wrap(new BytesArray(new byte[1])); @@ -72,8 +71,6 @@ public void setUp() throws Exception { this.socketSelector = new SocketSelector(eventHandler, rawSelector); this.socketSelector.setThread(); - when(rawSelector.selectedKeys()).thenReturn(keySet); - when(rawSelector.select(0)).thenReturn(1); when(channel.isOpen()).thenReturn(true); when(channel.getSelectionKey()).thenReturn(selectionKey); when(channel.getWriteContext()).thenReturn(writeContext); @@ -84,7 +81,7 @@ public void setUp() throws Exception { public void testRegisterChannel() throws Exception { socketSelector.scheduleForRegistration(channel); - socketSelector.doSelect(0); + socketSelector.preSelect(); verify(eventHandler).handleRegistration(channel); @@ -97,7 +94,7 @@ public void testClosedChannelWillNotBeRegistered() throws Exception { when(channel.isOpen()).thenReturn(false); socketSelector.scheduleForRegistration(channel); - socketSelector.doSelect(0); + socketSelector.preSelect(); verify(eventHandler).registrationException(same(channel), any(ClosedChannelException.class)); verify(channel, times(0)).finishConnect(); @@ -113,7 +110,7 @@ public void testRegisterChannelFailsDueToException() throws Exception { ClosedChannelException closedChannelException = new ClosedChannelException(); doThrow(closedChannelException).when(channel).register(); - socketSelector.doSelect(0); + socketSelector.preSelect(); verify(eventHandler).registrationException(channel, closedChannelException); verify(channel, times(0)).finishConnect(); @@ -128,7 +125,7 @@ public void testSuccessfullyRegisterChannelWillConnect() throws Exception { when(channel.finishConnect()).thenReturn(true); - socketSelector.doSelect(0); + socketSelector.preSelect(); verify(eventHandler).handleConnect(channel); } @@ -138,7 +135,7 @@ public void testConnectIncompleteWillNotNotify() throws Exception { 
when(channel.finishConnect()).thenReturn(false); - socketSelector.doSelect(0); + socketSelector.preSelect(); verify(eventHandler, times(0)).handleConnect(channel); } @@ -156,7 +153,7 @@ public void testQueueWriteChannelIsNoLongerWritable() throws Exception { socketSelector.queueWrite(writeOperation); when(channel.isWritable()).thenReturn(false); - socketSelector.doSelect(0); + socketSelector.preSelect(); verify(writeContext, times(0)).queueWriteOperations(writeOperation); verify(listener).onFailure(any(ClosedChannelException.class)); @@ -172,7 +169,7 @@ public void testQueueWriteSelectionKeyThrowsException() throws Exception { when(channel.isWritable()).thenReturn(true); when(channel.getSelectionKey()).thenReturn(selectionKey); when(selectionKey.interestOps(anyInt())).thenThrow(cancelledKeyException); - socketSelector.doSelect(0); + socketSelector.preSelect(); verify(writeContext, times(0)).queueWriteOperations(writeOperation); verify(listener).onFailure(cancelledKeyException); @@ -185,7 +182,7 @@ public void testQueueWriteSuccessful() throws Exception { assertTrue((selectionKey.interestOps() & SelectionKey.OP_WRITE) == 0); when(channel.isWritable()).thenReturn(true); - socketSelector.doSelect(0); + socketSelector.preSelect(); verify(writeContext).queueWriteOperations(writeOperation); assertTrue((selectionKey.interestOps() & SelectionKey.OP_WRITE) != 0); @@ -219,42 +216,36 @@ public void testQueueDirectlyInChannelBufferSelectionKeyThrowsException() throws } public void testConnectEvent() throws Exception { - keySet.add(selectionKey); - selectionKey.setReadyOps(SelectionKey.OP_CONNECT); when(channel.finishConnect()).thenReturn(true); - socketSelector.doSelect(0); + socketSelector.processKey(selectionKey); verify(eventHandler).handleConnect(channel); } public void testConnectEventFinishUnsuccessful() throws Exception { - keySet.add(selectionKey); - selectionKey.setReadyOps(SelectionKey.OP_CONNECT); when(channel.finishConnect()).thenReturn(false); - socketSelector.doSelect(0); + socketSelector.processKey(selectionKey); verify(eventHandler, times(0)).handleConnect(channel); } public void testConnectEventFinishThrowException() throws Exception { - keySet.add(selectionKey); IOException ioException = new IOException(); selectionKey.setReadyOps(SelectionKey.OP_CONNECT); when(channel.finishConnect()).thenThrow(ioException); - socketSelector.doSelect(0); + socketSelector.processKey(selectionKey); verify(eventHandler, times(0)).handleConnect(channel); verify(eventHandler).connectException(channel, ioException); } public void testWillNotConsiderWriteOrReadUntilConnectionComplete() throws Exception { - keySet.add(selectionKey); IOException ioException = new IOException(); selectionKey.setReadyOps(SelectionKey.OP_WRITE | SelectionKey.OP_READ); @@ -262,54 +253,48 @@ public void testWillNotConsiderWriteOrReadUntilConnectionComplete() throws Excep doThrow(ioException).when(eventHandler).handleWrite(channel); when(channel.isConnectComplete()).thenReturn(false); - socketSelector.doSelect(0); + socketSelector.processKey(selectionKey); verify(eventHandler, times(0)).handleWrite(channel); verify(eventHandler, times(0)).handleRead(channel); } public void testSuccessfulWriteEvent() throws Exception { - keySet.add(selectionKey); - selectionKey.setReadyOps(SelectionKey.OP_WRITE); - socketSelector.doSelect(0); + socketSelector.processKey(selectionKey); verify(eventHandler).handleWrite(channel); } public void testWriteEventWithException() throws Exception { - keySet.add(selectionKey); IOException ioException = new 
IOException(); selectionKey.setReadyOps(SelectionKey.OP_WRITE); doThrow(ioException).when(eventHandler).handleWrite(channel); - socketSelector.doSelect(0); + socketSelector.processKey(selectionKey); verify(eventHandler).writeException(channel, ioException); } public void testSuccessfulReadEvent() throws Exception { - keySet.add(selectionKey); - selectionKey.setReadyOps(SelectionKey.OP_READ); - socketSelector.doSelect(0); + socketSelector.processKey(selectionKey); verify(eventHandler).handleRead(channel); } public void testReadEventWithException() throws Exception { - keySet.add(selectionKey); IOException ioException = new IOException(); selectionKey.setReadyOps(SelectionKey.OP_READ); doThrow(ioException).when(eventHandler).handleRead(channel); - socketSelector.doSelect(0); + socketSelector.processKey(selectionKey); verify(eventHandler).readException(channel, ioException); } @@ -319,7 +304,7 @@ public void testCleanup() throws Exception { socketSelector.scheduleForRegistration(channel); - socketSelector.doSelect(0); + socketSelector.preSelect(); NetworkBytesReference networkBuffer = NetworkBytesReference.wrap(new BytesArray(new byte[1])); socketSelector.queueWrite(new WriteOperation(mock(NioSocketChannel.class), networkBuffer, listener));
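
The test diffs above are the direct payoff of the refactoring: none of these tests needs to stub rawSelector.select(0) to return 1 or seed a fake selected-key set anymore, because the unit under test is now the processKey/preSelect pair itself. A sketch of the resulting test shape, reusing the TestSelectionKey double and the channel/eventHandler mocks from the surrounding setUp; the test name is invented, and it assumes setUp also stubs channel.isConnectComplete() to true, as the write-event tests in this diff imply:

public void testWriteDispatchSketch() throws Exception {
    TestSelectionKey key = new TestSelectionKey(0);
    key.setReadyOps(SelectionKey.OP_WRITE); // pretend the channel became writable
    key.attach(channel);                    // processKey reads the channel off the attachment

    socketSelector.processKey(key);         // drive the selector without any raw select()

    verify(eventHandler).handleWrite(channel);
}
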