diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 23ebe45372033..bbb01426fded3 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -38,6 +38,11 @@ If you have a bugfix or new feature that you would like to contribute to Elastic
We enjoy working with contributors to get their code accepted. There are many approaches to fixing a problem and it is important to find the best approach before writing too much code.
+Note that it is unlikely the project will merge refactors for the sake of refactoring. These
+types of pull requests have a high cost to maintainers in reviewing and testing with little
+to no tangible benefit. This especially includes changes generated by tools. For example,
+converting all generic interface instances to use the diamond operator.
+
The process for contributing to any of the [Elastic repositories](https://github.com/elastic/) is similar. Details for individual projects can be found below.
### Fork and clone the repository
diff --git a/TESTING.asciidoc b/TESTING.asciidoc
index d0e1e0f50528c..9f64d1dd0afb8 100644
--- a/TESTING.asciidoc
+++ b/TESTING.asciidoc
@@ -472,28 +472,30 @@ is tested depends on the branch. On master, this will test against the current
stable branch. On the stable branch, it will test against the latest release
branch. Finally, on a release branch, it will test against the most recent release.
-=== BWC Testing against a specific branch
+=== BWC Testing against a specific remote/branch
Sometimes a backward compatibility change spans two versions. A common case is a new functionality
that needs a BWC bridge in and an unreleased versioned of a release branch (for example, 5.x).
-To test the changes, you can instruct gradle to build the BWC version from a local branch instead of
-pulling the release branch from GitHub. You do so using the `tests.bwc.refspec` system property:
+To test the changes, you can instruct gradle to build the BWC version from another remote/branch combination instead of
+pulling the release branch from GitHub. You do so using the `tests.bwc.remote` and `tests.bwc.refspec` system properties:
-------------------------------------------------
-gradle check -Dtests.bwc.refspec=origin/index_req_bwc_5.x
+gradle check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec=index_req_bwc_5.x
-------------------------------------------------
-The branch needs to be available on the local clone that the BWC makes of the repository you run the
-tests from. Using the `origin` remote is a handy trick to make sure that a branch is available
-and is up to date in the case of multiple runs.
+The branch needs to be available on the remote that the BWC tests fetch the
+repository from. Using a remote is a handy trick to make sure that a branch is
+available and is up to date in the case of multiple runs.
Example:
-Say you need to make a change to `master` and have a BWC layer in `5.x`. You will need to:
-. Create a branch called `index_req_change` off `master`. This will contain your change.
+Say you need to make a change to `master` and have a BWC layer in `5.x`. You
+will need to:
+. Create a branch called `index_req_change` off your remote `${remote}`. This
+will contain your change.
. Create a branch called `index_req_bwc_5.x` off `5.x`. This will contain your bwc layer.
-. If not running the tests locally, push both branches to your remote repository.
-. Run the tests with `gradle check -Dtests.bwc.refspec=origin/index_req_bwc_5.x`
+. Push both branches to your remote repository.
+. Run the tests with `gradle check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec=index_req_bwc_5.x`.
== Coverage analysis
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy
index ab618a0fdc7f7..af84a44233aa3 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy
@@ -63,13 +63,11 @@ class ClusterConfiguration {
boolean debug = false
/**
- * if true each node will be configured with discovery.zen.minimum_master_nodes set
- * to the total number of nodes in the cluster. This will also cause that each node has `0s` state recovery
- * timeout which can lead to issues if for instance an existing clusterstate is expected to be recovered
- * before any tests start
+ * Configuration of the setting discovery.zen.minimum_master_nodes on the nodes.
+ * In case of more than one node, this defaults to (number of nodes / 2) + 1
*/
@Input
- boolean useMinimumMasterNodes = true
+ Closure minimumMasterNodes = { getNumNodes() > 1 ? getNumNodes().intdiv(2) + 1 : -1 }
@Input
String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') +
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
index 217ecb4ed90fb..14074f241df91 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
@@ -311,13 +311,14 @@ class ClusterFormationTasks {
// Define a node attribute so we can test that it exists
'node.attr.testattr' : 'test'
]
- // we set min master nodes to the total number of nodes in the cluster and
- // basically skip initial state recovery to allow the cluster to form using a realistic master election
- // this means all nodes must be up, join the seed node and do a master election. This will also allow new and
- // old nodes in the BWC case to become the master
- if (node.config.useMinimumMasterNodes && node.config.numNodes > 1) {
- esConfig['discovery.zen.minimum_master_nodes'] = node.config.numNodes
- esConfig['discovery.initial_state_timeout'] = '0s' // don't wait for state.. just start up quickly
+ int minimumMasterNodes = node.config.minimumMasterNodes.call()
+ if (minimumMasterNodes > 0) {
+ esConfig['discovery.zen.minimum_master_nodes'] = minimumMasterNodes
+ }
+ if (node.config.numNodes > 1) {
+ // don't wait for state.. just start up quickly
+ // this will also allow new and old nodes in the BWC case to become the master
+ esConfig['discovery.initial_state_timeout'] = '0s'
}
esConfig['node.max_local_storage_nodes'] = node.config.numNodes
esConfig['http.port'] = node.config.httpPort
diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java
index 5143bdd870594..529182aa98f7d 100644
--- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java
+++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java
@@ -142,8 +142,8 @@ public NoopSearchRequestBuilder setRouting(String... routing) {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public NoopSearchRequestBuilder setPreference(String preference) {
request.preference(preference);
diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java
index 30d6c142100d6..5839251b6b728 100644
--- a/core/src/main/java/org/elasticsearch/Version.java
+++ b/core/src/main/java/org/elasticsearch/Version.java
@@ -115,7 +115,7 @@ public class Version implements Comparable {
new Version(V_6_0_0_rc1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final int V_6_0_0_rc2_ID = 6000052;
public static final Version V_6_0_0_rc2 =
- new Version(V_6_0_0_rc2_ID, org.apache.lucene.util.Version.LUCENE_7_0_0);
+ new Version(V_6_0_0_rc2_ID, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final int V_6_1_0_ID = 6010099;
public static final Version V_6_1_0 =
new Version(V_6_1_0_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java
index d8dfd71530922..d127829fa3584 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java
@@ -146,8 +146,8 @@ public ClusterSearchShardsRequest routing(String... routings) {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public ClusterSearchShardsRequest preference(String preference) {
this.preference = preference;
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java
index 7cb7ac1254c60..da31a79fc9bf0 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java
@@ -55,8 +55,8 @@ public ClusterSearchShardsRequestBuilder setRouting(String... routing) {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public ClusterSearchShardsRequestBuilder setPreference(String preference) {
request.preference(preference);
diff --git a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java
index 93045182f4c20..ea5dda45279e6 100644
--- a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java
@@ -152,8 +152,8 @@ public GetRequest routing(String routing) {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public GetRequest preference(String preference) {
this.preference = preference;
diff --git a/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java
index 973b130bedbd2..1ca8dbde65200 100644
--- a/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java
@@ -76,8 +76,8 @@ public GetRequestBuilder setRouting(String routing) {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public GetRequestBuilder setPreference(String preference) {
request.preference(preference);
diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
index 20a619cec2c70..420e0b448b052 100644
--- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
@@ -284,8 +284,8 @@ public ActionRequestValidationException validate() {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public MultiGetRequest preference(String preference) {
this.preference = preference;
diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java
index a2cb204d5eabf..fd7a6ac88253e 100644
--- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java
@@ -58,8 +58,8 @@ public MultiGetRequestBuilder add(MultiGetRequest.Item item) {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public MultiGetRequestBuilder setPreference(String preference) {
request.preference(preference);
diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java
index 25a624b2eb558..fea3cd1043c62 100644
--- a/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java
@@ -64,8 +64,8 @@ public int shardId() {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public MultiGetShardRequest preference(String preference) {
this.preference = preference;
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java
index 030d19d8b6879..7bfa317c72c70 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java
@@ -241,8 +241,8 @@ public SearchRequest routing(String... routings) {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public SearchRequest preference(String preference) {
this.preference = preference;
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java
index 41e5babb64635..922e9be5fd75d 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java
@@ -144,8 +144,8 @@ public SearchRequestBuilder setRouting(String... routing) {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public SearchRequestBuilder setPreference(String preference) {
request.preference(preference);
diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java
index 6356c554991e6..8fdb6398ddccf 100644
--- a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java
@@ -59,8 +59,8 @@ public int shardId() {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public MultiTermVectorsShardRequest preference(String preference) {
this.preference = preference;
diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java
index 0fe83e214463a..1886a8c2661ed 100644
--- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java
@@ -294,8 +294,7 @@ public String preference() {
/**
* Sets the preference to execute the search. Defaults to randomize across
- * shards. Can be set to _local to prefer local shards,
- * _primary to execute only on primary shards, or a custom value,
+ * shards. Can be set to _local to prefer local shards or a custom value,
* which guarantees that the same order will be used across different
* requests.
*/
diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java
index 9aa3ebca759c3..47bd09b100857 100644
--- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java
@@ -99,8 +99,8 @@ public TermVectorsRequestBuilder setParent(String parent) {
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
- * _local to prefer local shards, _primary to execute only on primary shards, or
- * a custom value, which guarantees that the same order will be used across different requests.
+ * _local to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
*/
public TermVectorsRequestBuilder setPreference(String preference) {
request.preference(preference);
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java
index f8d42b3d8f5a0..a2d015a0dd13f 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java
@@ -441,74 +441,6 @@ public ShardIterator primaryShardIt() {
return new PlainShardIterator(shardId, primaryAsList);
}
- public ShardIterator primaryActiveInitializingShardIt() {
- if (noPrimariesActive()) {
- return new PlainShardIterator(shardId, NO_SHARDS);
- }
- return primaryShardIt();
- }
-
- public ShardIterator primaryFirstActiveInitializingShardsIt() {
- ArrayList ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
- // fill it in a randomized fashion
- for (ShardRouting shardRouting : shuffler.shuffle(activeShards)) {
- ordered.add(shardRouting);
- if (shardRouting.primary()) {
- // switch, its the matching node id
- ordered.set(ordered.size() - 1, ordered.get(0));
- ordered.set(0, shardRouting);
- }
- }
- // no need to worry about primary first here..., its temporal
- if (!allInitializingShards.isEmpty()) {
- ordered.addAll(allInitializingShards);
- }
- return new PlainShardIterator(shardId, ordered);
- }
-
- public ShardIterator replicaActiveInitializingShardIt() {
- // If the primaries are unassigned, return an empty list (there aren't
- // any replicas to query anyway)
- if (noPrimariesActive()) {
- return new PlainShardIterator(shardId, NO_SHARDS);
- }
-
- LinkedList ordered = new LinkedList<>();
- for (ShardRouting replica : shuffler.shuffle(replicas)) {
- if (replica.active()) {
- ordered.addFirst(replica);
- } else if (replica.initializing()) {
- ordered.addLast(replica);
- }
- }
- return new PlainShardIterator(shardId, ordered);
- }
-
- public ShardIterator replicaFirstActiveInitializingShardsIt() {
- // If the primaries are unassigned, return an empty list (there aren't
- // any replicas to query anyway)
- if (noPrimariesActive()) {
- return new PlainShardIterator(shardId, NO_SHARDS);
- }
-
- ArrayList ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
- // fill it in a randomized fashion with the active replicas
- for (ShardRouting replica : shuffler.shuffle(replicas)) {
- if (replica.active()) {
- ordered.add(replica);
- }
- }
-
- // Add the primary shard
- ordered.add(primary);
-
- // Add initializing shards last
- if (!allInitializingShards.isEmpty()) {
- ordered.addAll(allInitializingShards);
- }
- return new PlainShardIterator(shardId, ordered);
- }
-
public ShardIterator onlyNodeActiveInitializingShardsIt(String nodeId) {
ArrayList ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
int seed = shuffler.nextSeed();
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
index 296eca476a6c5..87adb55704a25 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
@@ -198,14 +198,6 @@ private ShardIterator preferenceActiveShardIterator(IndexShardRoutingTable index
return indexShard.preferNodeActiveInitializingShardsIt(nodesIds);
case LOCAL:
return indexShard.preferNodeActiveInitializingShardsIt(Collections.singleton(localNodeId));
- case PRIMARY:
- return indexShard.primaryActiveInitializingShardIt();
- case REPLICA:
- return indexShard.replicaActiveInitializingShardIt();
- case PRIMARY_FIRST:
- return indexShard.primaryFirstActiveInitializingShardsIt();
- case REPLICA_FIRST:
- return indexShard.replicaFirstActiveInitializingShardsIt();
case ONLY_LOCAL:
return indexShard.onlyNodeActiveInitializingShardsIt(localNodeId);
case ONLY_NODES:
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/Preference.java b/core/src/main/java/org/elasticsearch/cluster/routing/Preference.java
index d4685d7aeadc1..9a55a99a51ca8 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/Preference.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/Preference.java
@@ -39,26 +39,6 @@ public enum Preference {
*/
LOCAL("_local"),
- /**
- * Route to primary shards
- */
- PRIMARY("_primary"),
-
- /**
- * Route to replica shards
- */
- REPLICA("_replica"),
-
- /**
- * Route to primary shards first
- */
- PRIMARY_FIRST("_primary_first"),
-
- /**
- * Route to replica shards first
- */
- REPLICA_FIRST("_replica_first"),
-
/**
* Route to the local shard only
*/
@@ -97,16 +77,6 @@ public static Preference parse(String preference) {
return PREFER_NODES;
case "_local":
return LOCAL;
- case "_primary":
- return PRIMARY;
- case "_replica":
- return REPLICA;
- case "_primary_first":
- case "_primaryFirst":
- return PRIMARY_FIRST;
- case "_replica_first":
- case "_replicaFirst":
- return REPLICA_FIRST;
case "_only_local":
case "_onlyLocal":
return ONLY_LOCAL;
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java
index 4160fd224aa14..f00e9cdc3ce8f 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java
@@ -20,6 +20,7 @@
package org.elasticsearch.cluster.routing.allocation.decider;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import com.carrotsearch.hppc.ObjectIntHashMap;
@@ -85,7 +86,7 @@ public class AwarenessAllocationDecider extends AllocationDecider {
private volatile String[] awarenessAttributes;
- private volatile Map forcedAwarenessAttributes;
+ private volatile Map> forcedAwarenessAttributes;
public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSettings) {
super(settings);
@@ -97,11 +98,11 @@ public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSett
}
private void setForcedAwarenessAttributes(Settings forceSettings) {
- Map forcedAwarenessAttributes = new HashMap<>();
+ Map> forcedAwarenessAttributes = new HashMap<>();
Map forceGroups = forceSettings.getAsGroups();
for (Map.Entry entry : forceGroups.entrySet()) {
- String[] aValues = entry.getValue().getAsArray("values");
- if (aValues.length > 0) {
+ List aValues = entry.getValue().getAsList("values");
+ if (aValues.size() > 0) {
forcedAwarenessAttributes.put(entry.getKey(), aValues);
}
}
@@ -169,7 +170,7 @@ private Decision underCapacity(ShardRouting shardRouting, RoutingNode node, Rout
}
int numberOfAttributes = nodesPerAttribute.size();
- String[] fullValues = forcedAwarenessAttributes.get(awarenessAttribute);
+ List fullValues = forcedAwarenessAttributes.get(awarenessAttribute);
if (fullValues != null) {
for (String fullValue : fullValues) {
if (!shardPerAttribute.containsKey(fullValue)) {
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
index ee6e422e82676..f35df27e3b338 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
@@ -804,14 +804,14 @@ private static class ListSetting extends Setting> {
private ListSetting(String key, Function> defaultStringValue, Function> parser,
Property... properties) {
- super(new ListKey(key), (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser,
+ super(new ListKey(key), (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s)), parser,
properties);
this.defaultStringValue = defaultStringValue;
}
@Override
public String getRaw(Settings settings) {
- String[] array = settings.getAsArray(getKey(), null);
+ List array = settings.getAsList(getKey(), null);
return array == null ? defaultValue.apply(settings) : arrayToParsableString(array);
}
@@ -823,11 +823,11 @@ boolean hasComplexMatcher() {
@Override
public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) {
if (exists(source) == false) {
- String[] asArray = defaultSettings.getAsArray(getKey(), null);
- if (asArray == null) {
- builder.putArray(getKey(), defaultStringValue.apply(defaultSettings));
+ List asList = defaultSettings.getAsList(getKey(), null);
+ if (asList == null) {
+ builder.putList(getKey(), defaultStringValue.apply(defaultSettings));
} else {
- builder.putArray(getKey(), asArray);
+ builder.putList(getKey(), asList);
}
}
}
@@ -1087,7 +1087,7 @@ private static List parseableStringToList(String parsableString) {
}
}
- private static String arrayToParsableString(String[] array) {
+ private static String arrayToParsableString(List array) {
try {
XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
builder.startArray();
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java
index a1adef3338662..41acefdd8e879 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java
@@ -366,48 +366,48 @@ public SizeValue getAsSize(String setting, SizeValue defaultValue) throws Settin
}
/**
- * The values associated with a setting key as an array.
+ * The values associated with a setting key as an immutable list.
*
* It will also automatically load a comma separated list under the settingPrefix and merge with
* the numbered format.
*
- * @param key The setting prefix to load the array by
- * @return The setting array values
+ * @param key The setting key to load the list by
+ * @return The setting list values
*/
- public String[] getAsArray(String key) throws SettingsException {
- return getAsArray(key, Strings.EMPTY_ARRAY, true);
+ public List getAsList(String key) throws SettingsException {
+ return getAsList(key, Collections.emptyList());
}
/**
- * The values associated with a setting key as an array.
+ * The values associated with a setting key as an immutable list.
*
* If commaDelimited is true, it will automatically load a comma separated list under the settingPrefix and merge with
* the numbered format.
*
- * @param key The setting key to load the array by
- * @return The setting array values
+ * @param key The setting key to load the list by
+ * @return The setting list values
*/
- public String[] getAsArray(String key, String[] defaultArray) throws SettingsException {
- return getAsArray(key, defaultArray, true);
+ public List getAsList(String key, List defaultValue) throws SettingsException {
+ return getAsList(key, defaultValue, true);
}
/**
- * The values associated with a setting key as an array.
+ * The values associated with a setting key as an immutable list.
*
* It will also automatically load a comma separated list under the settingPrefix and merge with
* the numbered format.
*
- * @param key The setting key to load the array by
- * @param defaultArray The default array to use if no value is specified
+ * @param key The setting key to load the list by
+ * @param defaultValue The default value to use if no value is specified
* @param commaDelimited Whether to try to parse a string as a comma-delimited value
- * @return The setting array values
+ * @return The setting list values
*/
- public String[] getAsArray(String key, String[] defaultArray, Boolean commaDelimited) throws SettingsException {
+ public List getAsList(String key, List defaultValue, Boolean commaDelimited) throws SettingsException {
List result = new ArrayList<>();
final Object valueFromPrefix = settings.get(key);
if (valueFromPrefix != null) {
if (valueFromPrefix instanceof List) {
- result = ((List) valueFromPrefix);
+ return ((List) valueFromPrefix); // it's already unmodifiable since the builder puts it as a such
} else if (commaDelimited) {
String[] strings = Strings.splitStringByCommaToArray(get(key));
if (strings.length > 0) {
@@ -421,9 +421,9 @@ public String[] getAsArray(String key, String[] defaultArray, Boolean commaDelim
}
if (result.isEmpty()) {
- return defaultArray;
+ return defaultValue;
}
- return result.toArray(new String[result.size()]);
+ return Collections.unmodifiableList(result);
}
@@ -552,7 +552,7 @@ public static Settings readSettingsFromStream(StreamInput in) throws IOException
if (value == null) {
builder.putNull(key);
} else if (value instanceof List) {
- builder.putArray(key, (List) value);
+ builder.putList(key, (List) value);
} else {
builder.put(key, value.toString());
}
@@ -679,7 +679,7 @@ private static void fromXContent(XContentParser parser, StringBuilder keyBuilder
}
String key = keyBuilder.toString();
validateValue(key, list, builder, parser, allowNullValues);
- builder.putArray(key, list);
+ builder.putList(key, list);
} else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
String key = keyBuilder.toString();
validateValue(key, null, builder, parser, allowNullValues);
@@ -898,7 +898,7 @@ public Builder copy(String key, String sourceKey, Settings source) {
}
final Object value = source.settings.get(sourceKey);
if (value instanceof List) {
- return putArray(key, (List)value);
+ return putList(key, (List)value);
} else if (value == null) {
return putNull(key);
} else {
@@ -1022,8 +1022,8 @@ public Builder put(String setting, long value, ByteSizeUnit sizeUnit) {
* @param values The values
* @return The builder
*/
- public Builder putArray(String setting, String... values) {
- return putArray(setting, Arrays.asList(values));
+ public Builder putList(String setting, String... values) {
+ return putList(setting, Arrays.asList(values));
}
/**
@@ -1033,7 +1033,7 @@ public Builder putArray(String setting, String... values) {
* @param values The values
* @return The builder
*/
- public Builder putArray(String setting, List values) {
+ public Builder putList(String setting, List values) {
remove(setting);
map.put(setting, Collections.unmodifiableList(new ArrayList<>(values)));
return this;
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
index 1ce119636f734..95c08e8889857 100644
--- a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
+++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
@@ -407,11 +407,10 @@ private ThreadContextStruct putHeaders(Map headers) {
if (headers.isEmpty()) {
return this;
} else {
- final Map newHeaders = new HashMap<>();
+ final Map newHeaders = new HashMap<>(this.requestHeaders);
for (Map.Entry entry : headers.entrySet()) {
putSingleHeader(entry.getKey(), entry.getValue(), newHeaders);
}
- newHeaders.putAll(this.requestHeaders);
return new ThreadContextStruct(newHeaders, responseHeaders, transientHeaders, isSystemContext);
}
}
diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java
index 27fe23048d79c..31a67333a810f 100644
--- a/core/src/main/java/org/elasticsearch/env/Environment.java
+++ b/core/src/main/java/org/elasticsearch/env/Environment.java
@@ -153,7 +153,7 @@ public Environment(final Settings settings, final Path configPath) {
Settings.Builder finalSettings = Settings.builder().put(settings);
finalSettings.put(PATH_HOME_SETTING.getKey(), homeFile);
if (PATH_DATA_SETTING.exists(settings)) {
- finalSettings.putArray(PATH_DATA_SETTING.getKey(), dataPaths);
+ finalSettings.putList(PATH_DATA_SETTING.getKey(), dataPaths);
}
finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile.toString());
this.settings = finalSettings.build();
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
index c6f1bfe7f41d1..d736703f6418e 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
@@ -68,7 +68,6 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
@@ -105,10 +104,10 @@ public static CharArraySet parseStemExclusion(Settings settings, CharArraySet de
if ("_none_".equals(value)) {
return CharArraySet.EMPTY_SET;
}
- String[] stemExclusion = settings.getAsArray("stem_exclusion", null);
+ List stemExclusion = settings.getAsList("stem_exclusion", null);
if (stemExclusion != null) {
// LUCENE 4 UPGRADE: Should be settings.getAsBoolean("stem_exclusion_case", false)?
- return new CharArraySet(Arrays.asList(stemExclusion), false);
+ return new CharArraySet(stemExclusion, false);
} else {
return defaultStemExclusion;
}
@@ -161,7 +160,7 @@ public static CharArraySet parseWords(Environment env, Settings settings, String
if ("_none_".equals(value)) {
return CharArraySet.EMPTY_SET;
} else {
- return resolveNamedWords(Arrays.asList(settings.getAsArray(name)), namedWords, ignoreCase);
+ return resolveNamedWords(settings.getAsList(name), namedWords, ignoreCase);
}
}
List pathLoadedWords = getWordList(env, settings, name);
@@ -225,11 +224,11 @@ public static List getWordList(Environment env, Settings settings, Strin
String wordListPath = settings.get(settingPrefix + "_path", null);
if (wordListPath == null) {
- String[] explicitWordList = settings.getAsArray(settingPrefix, null);
+ List explicitWordList = settings.getAsList(settingPrefix, null);
if (explicitWordList == null) {
return null;
} else {
- return Arrays.asList(explicitWordList);
+ return explicitWordList;
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
index e9654719bdc11..4ba078051640a 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
@@ -58,8 +58,8 @@ public void build(final Map tokenizers, final Map charFiltersList = new ArrayList<>(charFilterNames.length);
+ List charFilterNames = analyzerSettings.getAsList("char_filter");
+ List charFiltersList = new ArrayList<>(charFilterNames.size());
for (String charFilterName : charFilterNames) {
CharFilterFactory charFilter = charFilters.get(charFilterName);
if (charFilter == null) {
@@ -74,8 +74,8 @@ public void build(final Map tokenizers, final Map tokenFilterList = new ArrayList<>(tokenFilterNames.length);
+ List tokenFilterNames = analyzerSettings.getAsList("filter");
+ List tokenFilterList = new ArrayList<>(tokenFilterNames.size());
for (String tokenFilterName : tokenFilterNames) {
TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
if (tokenFilter == null) {
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java
index a375c1e8e3b9d..a0a7859d50cfd 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java
@@ -50,8 +50,8 @@ public void build(final TokenizerFactory keywordTokenizerFactory, final Map charFiltersList = new ArrayList<>(charFilterNames.length);
+ List charFilterNames = analyzerSettings.getAsList("char_filter");
+ List charFiltersList = new ArrayList<>(charFilterNames.size());
for (String charFilterName : charFilterNames) {
CharFilterFactory charFilter = charFilters.get(charFilterName);
if (charFilter == null) {
@@ -66,8 +66,8 @@ public void build(final TokenizerFactory keywordTokenizerFactory, final Map tokenFilterList = new ArrayList<>(tokenFilterNames.length);
+ List tokenFilterNames = analyzerSettings.getAsList("filter");
+ List tokenFilterList = new ArrayList<>(tokenFilterNames.size());
for (String tokenFilterName : tokenFilterNames) {
TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
if (tokenFilter == null) {
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java
index cb696219f4ed6..8210883b2f8f5 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java
@@ -41,7 +41,7 @@ public EdgeNGramTokenizerFactory(IndexSettings indexSettings, Environment enviro
super(indexSettings, name, settings);
this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
- this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
+ this.matcher = parseTokenChars(settings.getAsList("token_chars"));
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java
index 17acddf55e1a3..2a31f1eb26a3e 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java
@@ -28,6 +28,7 @@
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.HashMap;
+import java.util.List;
import java.util.Locale;
import java.util.Map;
@@ -65,8 +66,8 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {
MATCHERS = unmodifiableMap(matchers);
}
- static CharMatcher parseTokenChars(String[] characterClasses) {
- if (characterClasses == null || characterClasses.length == 0) {
+ static CharMatcher parseTokenChars(List characterClasses) {
+ if (characterClasses == null || characterClasses.isEmpty()) {
return null;
}
CharMatcher.Builder builder = new CharMatcher.Builder();
@@ -85,7 +86,7 @@ public NGramTokenizerFactory(IndexSettings indexSettings, Environment environmen
super(indexSettings, name, settings);
this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
- this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
+ this.matcher = parseTokenChars(settings.getAsList("token_chars"));
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java
index f61d51258b01b..bf9045c5d00e1 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java
@@ -62,7 +62,7 @@ public TokenStream create(TokenStream tokenStream) {
protected Reader getRulesFromSettings(Environment env) {
Reader rulesReader;
- if (settings.getAsArray("synonyms", null) != null) {
+ if (settings.getAsList("synonyms", null) != null) {
List rulesList = Analysis.getWordList(env, settings, "synonyms");
StringBuilder sb = new StringBuilder();
for (String line : rulesList) {
diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java
index dcd1399bf5159..fe7d097638f31 100644
--- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java
@@ -43,8 +43,9 @@
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
-import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType;
+import org.elasticsearch.index.fielddata.SortingNumericDoubleValues;
import org.elasticsearch.index.mapper.DateFieldMapper;
+import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
@@ -346,22 +347,23 @@ public boolean needsScores() {
@Override
protected NumericDoubleValues distance(LeafReaderContext context) {
final MultiGeoPointValues geoPointValues = fieldData.load(context).getGeoPointValues();
- return mode.select(new MultiValueMode.UnsortedNumericDoubleValues() {
- @Override
- public int docValueCount() {
- return geoPointValues.docValueCount();
- }
-
+ return mode.select(new SortingNumericDoubleValues() {
@Override
public boolean advanceExact(int docId) throws IOException {
- return geoPointValues.advanceExact(docId);
- }
-
- @Override
- public double nextValue() throws IOException {
- GeoPoint other = geoPointValues.nextValue();
- return Math.max(0.0d,
- distFunction.calculate(origin.lat(), origin.lon(), other.lat(), other.lon(), DistanceUnit.METERS) - offset);
+ if (geoPointValues.advanceExact(docId)) {
+ int n = geoPointValues.docValueCount();
+ resize(n);
+ for (int i = 0; i < n; i++) {
+ GeoPoint other = geoPointValues.nextValue();
+ double distance = distFunction.calculate(
+ origin.lat(), origin.lon(), other.lat(), other.lon(), DistanceUnit.METERS);
+ values[i] = Math.max(0.0d, distance - offset);
+ }
+ sort();
+ return true;
+ } else {
+ return false;
+ }
}
}, 0.0);
}
@@ -427,20 +429,20 @@ public boolean needsScores() {
@Override
protected NumericDoubleValues distance(LeafReaderContext context) {
final SortedNumericDoubleValues doubleValues = fieldData.load(context).getDoubleValues();
- return mode.select(new MultiValueMode.UnsortedNumericDoubleValues() {
+ return mode.select(new SortingNumericDoubleValues() {
@Override
- public int docValueCount() {
- return doubleValues.docValueCount();
- }
-
- @Override
- public boolean advanceExact(int doc) throws IOException {
- return doubleValues.advanceExact(doc);
- }
-
- @Override
- public double nextValue() throws IOException {
- return Math.max(0.0d, Math.abs(doubleValues.nextValue() - origin) - offset);
+ public boolean advanceExact(int docId) throws IOException {
+ if (doubleValues.advanceExact(docId)) {
+ int n = doubleValues.docValueCount();
+ resize(n);
+ for (int i = 0; i < n; i++) {
+ values[i] = Math.max(0.0d, Math.abs(doubleValues.nextValue() - origin) - offset);
+ }
+ sort();
+ return true;
+ } else {
+ return false;
+ }
}
}, 0.0);
}
@@ -542,10 +544,11 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE
if (distance.advanceExact(docId) == false) {
return Explanation.noMatch("No value for the distance");
}
+ double value = distance.doubleValue();
return Explanation.match(
(float) score(docId, subQueryScore.getValue()),
"Function for field " + getFieldName() + ":",
- func.explainFunction(getDistanceString(ctx, docId), distance.doubleValue(), scale));
+ func.explainFunction(getDistanceString(ctx, docId), value, scale));
}
};
}
diff --git a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java
index 1195644e328a0..b2ee4b8ffbd5f 100644
--- a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java
+++ b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java
@@ -104,16 +104,6 @@ protected double pick(SortedNumericDoubleValues values, double missingValue, Doc
}
return totalCount > 0 ? totalValue : missingValue;
}
-
- @Override
- protected double pick(UnsortedNumericDoubleValues values) throws IOException {
- final int count = values.docValueCount();
- double total = 0;
- for (int index = 0; index < count; ++index) {
- total += values.nextValue();
- }
- return total;
- }
},
/**
@@ -177,16 +167,6 @@ protected double pick(SortedNumericDoubleValues values, double missingValue, Doc
}
return totalValue/totalCount;
}
-
- @Override
- protected double pick(UnsortedNumericDoubleValues values) throws IOException {
- final int count = values.docValueCount();
- double total = 0;
- for (int index = 0; index < count; ++index) {
- total += values.nextValue();
- }
- return total/count;
- }
},
/**
@@ -303,16 +283,6 @@ protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc
}
return hasValue ? ord : -1;
}
-
- @Override
- protected double pick(UnsortedNumericDoubleValues values) throws IOException {
- int count = values.docValueCount();
- double min = Double.POSITIVE_INFINITY;
- for (int index = 0; index < count; ++index) {
- min = Math.min(values.nextValue(), min);
- }
- return min;
- }
},
/**
@@ -419,16 +389,6 @@ protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc
}
return ord;
}
-
- @Override
- protected double pick(UnsortedNumericDoubleValues values) throws IOException {
- int count = values.docValueCount();
- double max = Double.NEGATIVE_INFINITY;
- for (int index = 0; index < count; ++index) {
- max = Math.max(values.nextValue(), max);
- }
- return max;
- }
};
/**
@@ -456,11 +416,11 @@ public NumericDocValues select(final SortedNumericDocValues values, final long m
if (singleton != null) {
return new AbstractNumericDocValues() {
- private boolean hasValue;
+ private long value;
@Override
public boolean advanceExact(int target) throws IOException {
- hasValue = singleton.advanceExact(target);
+ this.value = singleton.advanceExact(target) ? singleton.longValue() : missingValue;
return true;
}
@@ -471,17 +431,17 @@ public int docID() {
@Override
public long longValue() throws IOException {
- return hasValue ? singleton.longValue() : missingValue;
+ return this.value;
}
};
} else {
return new AbstractNumericDocValues() {
- private boolean hasValue;
+ private long value;
@Override
public boolean advanceExact(int target) throws IOException {
- hasValue = values.advanceExact(target);
+ this.value = values.advanceExact(target) ? pick(values) : missingValue;
return true;
}
@@ -492,7 +452,7 @@ public int docID() {
@Override
public long longValue() throws IOException {
- return hasValue ? pick(values) : missingValue;
+ return value;
}
};
}
@@ -573,35 +533,33 @@ public NumericDoubleValues select(final SortedNumericDoubleValues values, final
final NumericDoubleValues singleton = FieldData.unwrapSingleton(values);
if (singleton != null) {
return new NumericDoubleValues() {
-
- private boolean hasValue;
+ private double value;
@Override
public boolean advanceExact(int doc) throws IOException {
- hasValue = singleton.advanceExact(doc);
+ this.value = singleton.advanceExact(doc) ? singleton.doubleValue() : missingValue;
return true;
}
@Override
public double doubleValue() throws IOException {
- return hasValue ? singleton.doubleValue() : missingValue;
+ return this.value;
}
-
};
} else {
return new NumericDoubleValues() {
- private boolean hasValue;
+ private double value;
@Override
public boolean advanceExact(int target) throws IOException {
- hasValue = values.advanceExact(target);
+ value = values.advanceExact(target) ? pick(values) : missingValue;
return true;
}
@Override
public double doubleValue() throws IOException {
- return hasValue ? pick(values) : missingValue;
+ return this.value;
}
};
}
@@ -678,17 +636,17 @@ public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef
}
return new AbstractBinaryDocValues() {
- private boolean hasValue;
+ private BytesRef value;
@Override
public boolean advanceExact(int target) throws IOException {
- hasValue = singleton.advanceExact(target);
+ this.value = singleton.advanceExact(target) ? singleton.binaryValue() : missingValue;
return true;
}
@Override
public BytesRef binaryValue() throws IOException {
- return hasValue ? singleton.binaryValue() : missingValue;
+ return this.value;
}
};
} else {
@@ -905,43 +863,6 @@ protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc
throw new IllegalArgumentException("Unsupported sort mode: " + this);
}
- /**
- * Return a {@link NumericDoubleValues} instance that can be used to sort documents
- * with this mode and the provided values. When a document has no value,
- * missingValue is returned.
- *
- * Allowed Modes: SUM, AVG, MIN, MAX
- */
- public NumericDoubleValues select(final UnsortedNumericDoubleValues values, final double missingValue) {
- return new NumericDoubleValues() {
- private boolean hasValue;
-
- @Override
- public boolean advanceExact(int doc) throws IOException {
- hasValue = values.advanceExact(doc);
- return true;
- }
- @Override
- public double doubleValue() throws IOException {
- return hasValue ? pick(values) : missingValue;
- }
- };
- }
-
- protected double pick(UnsortedNumericDoubleValues values) throws IOException {
- throw new IllegalArgumentException("Unsupported sort mode: " + this);
- }
-
- /**
- * Interface allowing custom value generators to be used in MultiValueMode.
- */
- // TODO: why do we need it???
- public interface UnsortedNumericDoubleValues {
- boolean advanceExact(int doc) throws IOException;
- int docValueCount() throws IOException;
- double nextValue() throws IOException;
- }
-
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeEnum(this);
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java
index 21cd2347cc61b..aa94bb762596a 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java
@@ -321,8 +321,9 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) {
do {
final IteratorAndCurrent top = pq.top();
- if (top.current.key != key) {
- // the key changes, reduce what we already buffered and reset the buffer for current buckets
+ if (Double.compare(top.current.key, key) != 0) {
+ // The key changes, reduce what we already buffered and reset the buffer for current buckets.
+ // Using Double.compare instead of != to handle NaN correctly.
final Bucket reduced = currentBuckets.get(0).reduce(currentBuckets, reduceContext);
if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) {
reducedBuckets.add(reduced);
@@ -335,7 +336,7 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) {
if (top.iterator.hasNext()) {
final Bucket next = top.iterator.next();
- assert next.key > top.current.key : "shards must return data sorted by key";
+ assert Double.compare(next.key, top.current.key) > 0 : "shards must return data sorted by key";
top.current = next;
pq.updateTop();
} else {
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
index 06e81242af418..fc41770b37766 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
@@ -40,7 +40,6 @@
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.indices.analysis.AnalysisModuleTests.AppendCharFilter;
import org.elasticsearch.plugins.AnalysisPlugin;
-import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
@@ -73,7 +72,7 @@ public void setUp() throws Exception {
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.put("index.analysis.analyzer.custom_analyzer.filter", "mock")
.put("index.analysis.normalizer.my_normalizer.type", "custom")
- .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase").build();
+ .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase").build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
environment = new Environment(settings);
AnalysisPlugin plugin = new AnalysisPlugin() {
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
index 15a2f9e74a461..bd76557f9a86f 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
@@ -210,7 +210,7 @@ protected void createIndexBasedOnFieldSettings(String index, String alias, TestF
Settings.Builder settings = Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.tv_test.tokenizer", "standard")
- .putArray("index.analysis.analyzer.tv_test.filter", "lowercase");
+ .putList("index.analysis.analyzer.tv_test.filter", "lowercase");
assertAcked(prepareCreate(index).addMapping("type1", mappingBuilder).setSettings(settings).addAlias(new Alias(alias)));
}
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
index bbd7d5501783c..520c881aa7e62 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
@@ -189,7 +189,7 @@ public void testSimpleTermVectors() throws IOException {
.setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
+ .putList("index.analysis.analyzer.tv_test.filter", "lowercase")));
for (int i = 0; i < 10; i++) {
client().prepareIndex("test", "type1", Integer.toString(i))
.setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
@@ -261,7 +261,7 @@ public void testRandomSingleTermVectors() throws IOException {
assertAcked(prepareCreate("test").addMapping("type1", mapping)
.setSettings(Settings.builder()
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
+ .putList("index.analysis.analyzer.tv_test.filter", "lowercase")));
for (int i = 0; i < 10; i++) {
client().prepareIndex("test", "type1", Integer.toString(i))
.setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
@@ -395,7 +395,7 @@ public void testSimpleTermVectorsWithGenerate() throws IOException {
.setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
+ .putList("index.analysis.analyzer.tv_test.filter", "lowercase")));
ensureGreen();
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java
index e4d55da9f92b1..5e81949402055 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java
@@ -152,7 +152,7 @@ public void testRandomPayloadWithDelimitedPayloadTokenFilter() throws IOExceptio
.field("analyzer", "payload_test").endObject().endObject().endObject().endObject();
Settings setting = Settings.builder()
.put("index.analysis.analyzer.payload_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.payload_test.filter", "my_delimited_payload_filter")
+ .putList("index.analysis.analyzer.payload_test.filter", "my_delimited_payload_filter")
.put("index.analysis.filter.my_delimited_payload_filter.delimiter", delimiter)
.put("index.analysis.filter.my_delimited_payload_filter.encoding", encodingString)
.put("index.analysis.filter.my_delimited_payload_filter.type", "mock_payload_filter").build();
diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
index 7b1ac5116f2dd..cdcaf4a1b9c20 100644
--- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
@@ -34,6 +34,8 @@
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.After;
+import java.util.Arrays;
+
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.hamcrest.Matchers.containsString;
@@ -242,11 +244,11 @@ public void testClusterSettingsUpdateResponse() {
public void testCanUpdateTracerSettings() {
ClusterUpdateSettingsResponse clusterUpdateSettingsResponse = client().admin().cluster()
.prepareUpdateSettings()
- .setTransientSettings(Settings.builder().putArray("transport.tracer.include", "internal:index/shard/recovery/*",
+ .setTransientSettings(Settings.builder().putList("transport.tracer.include", "internal:index/shard/recovery/*",
"internal:gateway/local*"))
.get();
- assertArrayEquals(clusterUpdateSettingsResponse.getTransientSettings().getAsArray("transport.tracer.include"), new String[] {"internal:index/shard/recovery/*",
- "internal:gateway/local*"});
+ assertEquals(clusterUpdateSettingsResponse.getTransientSettings().getAsList("transport.tracer.include"),
+ Arrays.asList("internal:index/shard/recovery/*", "internal:gateway/local*"));
}
public void testUpdateDiscoveryPublishTimeout() {
diff --git a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
index 172bcd6bd558b..6fd11aa91dce6 100644
--- a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
@@ -50,6 +50,7 @@
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
@@ -415,10 +416,6 @@ public void testShardsAndPreferNodeRouting() {
}
public void testReplicaShardPreferenceIters() throws Exception {
- AllocationService strategy = createAllocationService(Settings.builder()
- .put("cluster.routing.allocation.node_concurrent_recoveries", 10)
- .build());
-
OperationRouting operationRouting = new OperationRouting(Settings.EMPTY, new ClusterSettings(Settings.EMPTY,
ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
@@ -430,69 +427,22 @@ public void testReplicaShardPreferenceIters() throws Exception {
.addAsNew(metaData.index("test"))
.build();
- ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTable).build();
-
- clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
- .add(newNode("node1"))
- .add(newNode("node2"))
- .add(newNode("node3"))
- .localNodeId("node1")
- ).build();
- clusterState = strategy.reroute(clusterState, "reroute");
-
- clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
-
- // When replicas haven't initialized, it comes back with the primary first, then initializing replicas
- GroupShardsIterator shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica_first");
- assertThat(shardIterators.size(), equalTo(2)); // two potential shards
- ShardIterator iter = shardIterators.iterator().next();
- assertThat(iter.size(), equalTo(3)); // three potential candidates for the shard
- ShardRouting routing = iter.nextOrNull();
- assertNotNull(routing);
- assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
- assertTrue(routing.primary()); // replicas haven't initialized yet, so primary is first
- assertTrue(routing.started());
- routing = iter.nextOrNull();
- assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
- assertFalse(routing.primary());
- assertTrue(routing.initializing());
- routing = iter.nextOrNull();
- assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
- assertFalse(routing.primary());
- assertTrue(routing.initializing());
-
- clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
-
- clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
-
-
- shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica");
- assertThat(shardIterators.size(), equalTo(2)); // two potential shards
- iter = shardIterators.iterator().next();
- assertThat(iter.size(), equalTo(2)); // two potential replicas for the shard
- routing = iter.nextOrNull();
- assertNotNull(routing);
- assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
- assertFalse(routing.primary());
- routing = iter.nextOrNull();
- assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
- assertFalse(routing.primary());
-
- shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica_first");
- assertThat(shardIterators.size(), equalTo(2)); // two potential shards
- iter = shardIterators.iterator().next();
- assertThat(iter.size(), equalTo(3)); // three potential candidates for the shard
- routing = iter.nextOrNull();
- assertNotNull(routing);
- assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
- assertFalse(routing.primary());
- routing = iter.nextOrNull();
- assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
- assertFalse(routing.primary());
- // finally the primary
- routing = iter.nextOrNull();
- assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1)));
- assertTrue(routing.primary());
+ final ClusterState clusterState = ClusterState
+ .builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
+ .metaData(metaData)
+ .routingTable(routingTable)
+ .nodes(DiscoveryNodes.builder()
+ .add(newNode("node1"))
+ .add(newNode("node2"))
+ .add(newNode("node3"))
+ .localNodeId("node1"))
+ .build();
+
+ String[] removedPreferences = {"_primary", "_primary_first", "_replica", "_replica_first"};
+ for (String pref : removedPreferences) {
+ expectThrows(IllegalArgumentException.class,
+ () -> operationRouting.searchShards(clusterState, new String[]{"test"}, null, pref));
+ }
}
}
diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
index 24f9550a78de6..bd4ac25a8747b 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
@@ -179,8 +179,8 @@ public void testAddConsumerAffix() {
service.applySettings(Settings.builder()
.put("foo.test.bar", 2)
.put("foo.test_1.bar", 7)
- .putArray("foo.test_list.list", "16", "17")
- .putArray("foo.test_list_1.list", "18", "19", "20")
+ .putList("foo.test_list.list", "16", "17")
+ .putList("foo.test_list_1.list", "18", "19", "20")
.build());
assertEquals(2, intResults.get("test").intValue());
assertEquals(7, intResults.get("test_1").intValue());
@@ -195,7 +195,7 @@ public void testAddConsumerAffix() {
service.applySettings(Settings.builder()
.put("foo.test.bar", 2)
.put("foo.test_1.bar", 8)
- .putArray("foo.test_list.list", "16", "17")
+ .putList("foo.test_list.list", "16", "17")
.putNull("foo.test_list_1.list")
.build());
assertNull("test wasn't changed", intResults.get("test"));
@@ -231,8 +231,8 @@ public void testAddConsumerAffixMap() {
service.applySettings(Settings.builder()
.put("foo.test.bar", 2)
.put("foo.test_1.bar", 7)
- .putArray("foo.test_list.list", "16", "17")
- .putArray("foo.test_list_1.list", "18", "19", "20")
+ .putList("foo.test_list.list", "16", "17")
+ .putList("foo.test_list_1.list", "18", "19", "20")
.build());
assertEquals(2, intResults.get("test").intValue());
assertEquals(7, intResults.get("test_1").intValue());
@@ -247,7 +247,7 @@ public void testAddConsumerAffixMap() {
service.applySettings(Settings.builder()
.put("foo.test.bar", 2)
.put("foo.test_1.bar", 8)
- .putArray("foo.test_list.list", "16", "17")
+ .putList("foo.test_list.list", "16", "17")
.putNull("foo.test_list_1.list")
.build());
assertNull("test wasn't changed", intResults.get("test"));
@@ -470,14 +470,14 @@ public void testDiff() throws IOException {
Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY);
assertEquals(2, diff.size());
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c"));
diff = settings.diff(
Settings.builder().put("foo.bar", 5).build(),
- Settings.builder().put("foo.bar.baz", 17).putArray("foo.bar.quux", "d", "e", "f").build());
+ Settings.builder().put("foo.bar.baz", 17).putList("foo.bar.quux", "d", "e", "f").build());
assertEquals(2, diff.size());
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(17));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"d", "e", "f"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("d", "e", "f"));
diff = settings.diff(
Settings.builder().put("some.group.foo", 5).build(),
@@ -485,7 +485,7 @@ public void testDiff() throws IOException {
assertEquals(4, diff.size());
assertThat(diff.getAsInt("some.group.foobar", null), equalTo(17));
assertNull(diff.get("some.group.foo"));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c"));
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
@@ -495,7 +495,7 @@ public void testDiff() throws IOException {
assertEquals(4, diff.size());
assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17));
assertNull(diff.get("some.prefix.foo.somekey"));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c"));
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
}
@@ -513,14 +513,14 @@ public void testDiffWithAffixAndComplexMatcher() {
Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY);
assertEquals(1, diff.size());
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
- assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys
+ assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys
diff = settings.diff(
Settings.builder().put("foo.bar", 5).build(),
- Settings.builder().put("foo.bar.baz", 17).putArray("foo.bar.quux", "d", "e", "f").build());
+ Settings.builder().put("foo.bar.baz", 17).putList("foo.bar.quux", "d", "e", "f").build());
assertEquals(2, diff.size());
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(17));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"d", "e", "f"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("d", "e", "f"));
diff = settings.diff(
Settings.builder().put("some.group.foo", 5).build(),
@@ -528,7 +528,7 @@ public void testDiffWithAffixAndComplexMatcher() {
assertEquals(3, diff.size());
assertThat(diff.getAsInt("some.group.foobar", null), equalTo(17));
assertNull(diff.get("some.group.foo"));
- assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys
+ assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
@@ -538,21 +538,21 @@ public void testDiffWithAffixAndComplexMatcher() {
assertEquals(3, diff.size());
assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17));
assertNull(diff.get("some.prefix.foo.somekey"));
- assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys
+ assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
diff = settings.diff(
Settings.builder().put("some.prefix.foo.somekey", 5).build(),
Settings.builder().put("some.prefix.foobar.somekey", 17).put("some.prefix.foo.somekey", 18)
- .putArray("foo.bar.quux", "x", "y", "z")
- .putArray("foo.baz.quux", "d", "e", "f")
+ .putList("foo.bar.quux", "x", "y", "z")
+ .putList("foo.baz.quux", "d", "e", "f")
.build());
assertEquals(5, diff.size());
assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17));
assertNull(diff.get("some.prefix.foo.somekey"));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"x", "y", "z"});
- assertArrayEquals(diff.getAsArray("foo.baz.quux", null), new String[] {"d", "e", "f"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("x", "y", "z"));
+ assertEquals(diff.getAsList("foo.baz.quux", null), Arrays.asList("d", "e", "f"));
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
}
@@ -562,7 +562,7 @@ public void testUpdateTracer() {
AtomicReference<List<String>> ref = new AtomicReference<>();
settings.addSettingsUpdateConsumer(TransportService.TRACE_LOG_INCLUDE_SETTING, ref::set);
settings.applySettings(Settings.builder()
- .putArray("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*").build());
+ .putList("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*").build());
assertNotNull(ref.get().size());
assertEquals(ref.get().size(), 2);
assertTrue(ref.get().contains("internal:index/shard/recovery/*"));
diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
index 4dfedf519bd16..65d51e126c9f6 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
@@ -441,7 +441,7 @@ public void testListSettings() {
assertEquals("foo,bar", value.get(0));
List<String> input = Arrays.asList("test", "test1, test2", "test", ",,,,");
- Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0]));
+ Settings.Builder builder = Settings.builder().putList("foo.bar", input.toArray(new String[0]));
assertTrue(listSetting.exists(builder.build()));
value = listSetting.get(builder.build());
assertEquals(input.size(), value.size());
@@ -464,11 +464,11 @@ public void testListSettings() {
assertEquals(input.size(), ref.get().size());
assertArrayEquals(ref.get().toArray(new String[0]), input.toArray(new String[0]));
- settingUpdater.apply(Settings.builder().putArray("foo.bar", "123").build(), builder.build());
+ settingUpdater.apply(Settings.builder().putList("foo.bar", "123").build(), builder.build());
assertEquals(1, ref.get().size());
assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"123"});
- settingUpdater.apply(Settings.builder().put("foo.bar", "1,2,3").build(), Settings.builder().putArray("foo.bar", "123").build());
+ settingUpdater.apply(Settings.builder().put("foo.bar", "1,2,3").build(), Settings.builder().putList("foo.bar", "123").build());
assertEquals(3, ref.get().size());
assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"1", "2", "3"});
@@ -492,17 +492,17 @@ public void testListSettings() {
assertEquals(1, value.size());
assertEquals("foo,bar", value.get(0));
- value = settingWithFallback.get(Settings.builder().putArray("foo.bar", "1", "2").build());
+ value = settingWithFallback.get(Settings.builder().putList("foo.bar", "1", "2").build());
assertEquals(2, value.size());
assertEquals("1", value.get(0));
assertEquals("2", value.get(1));
- value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").build());
+ value = settingWithFallback.get(Settings.builder().putList("foo.baz", "3", "4").build());
assertEquals(2, value.size());
assertEquals("3", value.get(0));
assertEquals("4", value.get(1));
- value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").putArray("foo.bar", "1", "2").build());
+ value = settingWithFallback.get(Settings.builder().putList("foo.baz", "3", "4").putList("foo.bar", "1", "2").build());
assertEquals(2, value.size());
assertEquals("3", value.get(0));
assertEquals("4", value.get(1));
@@ -512,7 +512,7 @@ public void testListSettingAcceptsNumberSyntax() {
Setting<List<String>> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(),
Property.Dynamic, Property.NodeScope);
List<String> input = Arrays.asList("test", "test1, test2", "test", ",,,,");
- Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0]));
+ Settings.Builder builder = Settings.builder().putList("foo.bar", input.toArray(new String[0]));
// try to parse this really annoying format
for (String key : builder.keys()) {
assertTrue("key: " + key + " doesn't match", listSetting.match(key));
@@ -601,11 +601,11 @@ public void testGetAllConcreteSettings() {
(key) -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope));
Settings settings = Settings.builder()
- .putArray("foo.1.bar", "1", "2")
- .putArray("foo.2.bar", "3", "4", "5")
- .putArray("foo.bar", "6")
- .putArray("some.other", "6")
- .putArray("foo.3.bar", "6")
+ .putList("foo.1.bar", "1", "2")
+ .putList("foo.2.bar", "3", "4", "5")
+ .putList("foo.bar", "6")
+ .putList("some.other", "6")
+ .putList("foo.3.bar", "6")
.build();
Stream<Setting<List<String>>> allConcreteSettings = listAffixSetting.getAllConcreteSettings(settings);
Map<String, List<String>> collect = allConcreteSettings.collect(Collectors.toMap(Setting::getKey, (s) -> s.get(settings)));
diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java
index 4a9e55c324653..42cb0f1e3e7e3 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java
@@ -35,11 +35,10 @@
import java.io.ByteArrayInputStream;
import java.io.IOException;
-import java.io.StringBufferInputStream;
-import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
@@ -47,7 +46,7 @@
import java.util.NoSuchElementException;
import java.util.Set;
-import static org.hamcrest.Matchers.arrayContaining;
+import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@@ -165,99 +164,99 @@ public void testNames() {
public void testThatArraysAreOverriddenCorrectly() throws IOException {
// overriding a single value with an array
Settings settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1").build())
- .put(Settings.builder().putArray("value", "2", "3").build())
+ .put(Settings.builder().putList("value", "1").build())
+ .put(Settings.builder().putList("value", "2", "3").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("2", "3"));
+ assertThat(settings.getAsList("value"), contains("2", "3"));
settings = Settings.builder()
.put(Settings.builder().put("value", "1").build())
- .put(Settings.builder().putArray("value", "2", "3").build())
+ .put(Settings.builder().putList("value", "2", "3").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("2", "3"));
+ assertThat(settings.getAsList("value"), contains("2", "3"));
settings = Settings.builder().loadFromSource("value: 1", XContentType.YAML)
.loadFromSource("value: [ 2, 3 ]", XContentType.YAML)
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("2", "3"));
+ assertThat(settings.getAsList("value"), contains("2", "3"));
settings = Settings.builder()
.put(Settings.builder().put("value.with.deep.key", "1").build())
- .put(Settings.builder().putArray("value.with.deep.key", "2", "3").build())
+ .put(Settings.builder().putList("value.with.deep.key", "2", "3").build())
.build();
- assertThat(settings.getAsArray("value.with.deep.key"), arrayContaining("2", "3"));
+ assertThat(settings.getAsList("value.with.deep.key"), contains("2", "3"));
// overriding an array with a shorter array
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1", "2").build())
- .put(Settings.builder().putArray("value", "3").build())
+ .put(Settings.builder().putList("value", "1", "2").build())
+ .put(Settings.builder().putList("value", "3").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("3"));
+ assertThat(settings.getAsList("value"), contains("3"));
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1", "2", "3").build())
- .put(Settings.builder().putArray("value", "4", "5").build())
+ .put(Settings.builder().putList("value", "1", "2", "3").build())
+ .put(Settings.builder().putList("value", "4", "5").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("4", "5"));
+ assertThat(settings.getAsList("value"), contains("4", "5"));
settings = Settings.builder()
- .put(Settings.builder().putArray("value.deep.key", "1", "2", "3").build())
- .put(Settings.builder().putArray("value.deep.key", "4", "5").build())
+ .put(Settings.builder().putList("value.deep.key", "1", "2", "3").build())
+ .put(Settings.builder().putList("value.deep.key", "4", "5").build())
.build();
- assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5"));
+ assertThat(settings.getAsList("value.deep.key"), contains("4", "5"));
// overriding an array with a longer array
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1", "2").build())
- .put(Settings.builder().putArray("value", "3", "4", "5").build())
+ .put(Settings.builder().putList("value", "1", "2").build())
+ .put(Settings.builder().putList("value", "3", "4", "5").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("3", "4", "5"));
+ assertThat(settings.getAsList("value"), contains("3", "4", "5"));
settings = Settings.builder()
- .put(Settings.builder().putArray("value.deep.key", "1", "2", "3").build())
- .put(Settings.builder().putArray("value.deep.key", "4", "5").build())
+ .put(Settings.builder().putList("value.deep.key", "1", "2", "3").build())
+ .put(Settings.builder().putList("value.deep.key", "4", "5").build())
.build();
- assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5"));
+ assertThat(settings.getAsList("value.deep.key"), contains("4", "5"));
// overriding an array with a single value
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1", "2").build())
+ .put(Settings.builder().putList("value", "1", "2").build())
.put(Settings.builder().put("value", "3").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("3"));
+ assertThat(settings.getAsList("value"), contains("3"));
settings = Settings.builder()
- .put(Settings.builder().putArray("value.deep.key", "1", "2").build())
+ .put(Settings.builder().putList("value.deep.key", "1", "2").build())
.put(Settings.builder().put("value.deep.key", "3").build())
.build();
- assertThat(settings.getAsArray("value.deep.key"), arrayContaining("3"));
+ assertThat(settings.getAsList("value.deep.key"), contains("3"));
// test that other arrays are not overridden
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1", "2", "3").putArray("a", "b", "c").build())
- .put(Settings.builder().putArray("value", "4", "5").putArray("d", "e", "f").build())
+ .put(Settings.builder().putList("value", "1", "2", "3").putList("a", "b", "c").build())
+ .put(Settings.builder().putList("value", "4", "5").putList("d", "e", "f").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("4", "5"));
- assertThat(settings.getAsArray("a"), arrayContaining("b", "c"));
- assertThat(settings.getAsArray("d"), arrayContaining("e", "f"));
+ assertThat(settings.getAsList("value"), contains("4", "5"));
+ assertThat(settings.getAsList("a"), contains("b", "c"));
+ assertThat(settings.getAsList("d"), contains("e", "f"));
settings = Settings.builder()
- .put(Settings.builder().putArray("value.deep.key", "1", "2", "3").putArray("a", "b", "c").build())
- .put(Settings.builder().putArray("value.deep.key", "4", "5").putArray("d", "e", "f").build())
+ .put(Settings.builder().putList("value.deep.key", "1", "2", "3").putList("a", "b", "c").build())
+ .put(Settings.builder().putList("value.deep.key", "4", "5").putList("d", "e", "f").build())
.build();
- assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5"));
- assertThat(settings.getAsArray("a"), notNullValue());
- assertThat(settings.getAsArray("d"), notNullValue());
+ assertThat(settings.getAsList("value.deep.key"), contains("4", "5"));
+ assertThat(settings.getAsList("a"), notNullValue());
+ assertThat(settings.getAsList("d"), notNullValue());
// overriding a deeper structure with an array
settings = Settings.builder()
.put(Settings.builder().put("value.data", "1").build())
- .put(Settings.builder().putArray("value", "4", "5").build())
+ .put(Settings.builder().putList("value", "4", "5").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("4", "5"));
+ assertThat(settings.getAsList("value"), contains("4", "5"));
// overriding an array with a deeper structure
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "4", "5").build())
+ .put(Settings.builder().putList("value", "4", "5").build())
.put(Settings.builder().put("value.data", "1").build())
.build();
assertThat(settings.get("value.data"), is("1"));
@@ -477,7 +476,7 @@ public void testWriteSettingsToStream() throws IOException {
Settings.Builder builder = Settings.builder();
builder.put("test.key1.baz", "blah1");
builder.putNull("test.key3.bar");
- builder.putArray("test.key4.foo", "1", "2");
+ builder.putList("test.key4.foo", "1", "2");
builder.setSecureSettings(secureSettings);
assertEquals(7, builder.build().size());
Settings.writeSettingsToStream(builder.build(), out);
@@ -487,7 +486,7 @@ public void testWriteSettingsToStream() throws IOException {
assertEquals("blah1", settings.get("test.key1.baz"));
assertNull(settings.get("test.key3.bar"));
assertTrue(settings.keySet().contains("test.key3.bar"));
- assertArrayEquals(new String[] {"1", "2"}, settings.getAsArray("test.key4.foo"));
+ assertEquals(Arrays.asList("1", "2"), settings.getAsList("test.key4.foo"));
}
public void testSecureSettingConflict() {
@@ -508,7 +507,7 @@ public void testGetAsArrayFailsOnDuplicates() {
public void testToAndFromXContent() throws IOException {
Settings settings = Settings.builder()
- .putArray("foo.bar.baz", "1", "2", "3")
+ .putList("foo.bar.baz", "1", "2", "3")
.put("foo.foobar", 2)
.put("rootfoo", "test")
.put("foo.baz", "1,2,3,4")
@@ -522,7 +521,7 @@ public void testToAndFromXContent() throws IOException {
XContentParser parser = createParser(builder);
Settings build = Settings.fromXContent(parser);
assertEquals(5, build.size());
- assertArrayEquals(new String[] {"1", "2", "3"}, build.getAsArray("foo.bar.baz"));
+ assertEquals(Arrays.asList("1", "2", "3"), build.getAsList("foo.bar.baz"));
assertEquals(2, build.getAsInt("foo.foobar", 0).intValue());
assertEquals("test", build.get("rootfoo"));
assertEquals("1,2,3,4", build.get("foo.baz"));
@@ -542,9 +541,9 @@ public void testSimpleJsonSettings() throws Exception {
// check array
assertNull(settings.get("test1.test3.0"));
assertNull(settings.get("test1.test3.1"));
- assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
- assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
- assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
+ assertThat(settings.getAsList("test1.test3").size(), equalTo(2));
+ assertThat(settings.getAsList("test1.test3").get(0), equalTo("test3-1"));
+ assertThat(settings.getAsList("test1.test3").get(1), equalTo("test3-2"));
}
public void testDuplicateKeysThrowsException() {
@@ -575,14 +574,14 @@ public void testDuplicateKeysThrowsException() {
public void testToXContent() throws IOException {
// this is just terrible but it's the existing behavior!
- Settings test = Settings.builder().putArray("foo.bar", "1", "2", "3").put("foo.bar.baz", "test").build();
+ Settings test = Settings.builder().putList("foo.bar", "1", "2", "3").put("foo.bar.baz", "test").build();
XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
builder.startObject();
test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap()));
builder.endObject();
assertEquals("{\"foo\":{\"bar.baz\":\"test\",\"bar\":[\"1\",\"2\",\"3\"]}}", builder.string());
- test = Settings.builder().putArray("foo.bar", "1", "2", "3").build();
+ test = Settings.builder().putList("foo.bar", "1", "2", "3").build();
builder = XContentBuilder.builder(XContentType.JSON.xContent());
builder.startObject();
test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap()));
@@ -615,18 +614,18 @@ public void testSimpleYamlSettings() throws Exception {
// check array
assertNull(settings.get("test1.test3.0"));
assertNull(settings.get("test1.test3.1"));
- assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
- assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
- assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
+ assertThat(settings.getAsList("test1.test3").size(), equalTo(2));
+ assertThat(settings.getAsList("test1.test3").get(0), equalTo("test3-1"));
+ assertThat(settings.getAsList("test1.test3").get(1), equalTo("test3-2"));
}
public void testYamlLegacyList() throws IOException {
Settings settings = Settings.builder()
.loadFromStream("foo.yml", new ByteArrayInputStream("foo.bar.baz.0: 1\nfoo.bar.baz.1: 2".getBytes(StandardCharsets.UTF_8)),
false).build();
- assertThat(settings.getAsArray("foo.bar.baz").length, equalTo(2));
- assertThat(settings.getAsArray("foo.bar.baz")[0], equalTo("1"));
- assertThat(settings.getAsArray("foo.bar.baz")[1], equalTo("2"));
+ assertThat(settings.getAsList("foo.bar.baz").size(), equalTo(2));
+ assertThat(settings.getAsList("foo.bar.baz").get(0), equalTo("1"));
+ assertThat(settings.getAsList("foo.bar.baz").get(1), equalTo("2"));
}
public void testIndentation() throws Exception {
@@ -675,14 +674,14 @@ public void testReadLegacyFromStream() throws IOException {
in.setVersion(VersionUtils.getPreviousVersion(Version.V_6_1_0));
Settings settings = Settings.readSettingsFromStream(in);
assertEquals(2, settings.size());
- assertArrayEquals(new String[]{"0", "1", "2", "3"}, settings.getAsArray("foo.bar"));
+ assertEquals(Arrays.asList("0", "1", "2", "3"), settings.getAsList("foo.bar"));
assertEquals("baz", settings.get("foo.bar.baz"));
}
public void testWriteLegacyOutput() throws IOException {
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(VersionUtils.getPreviousVersion(Version.V_6_1_0));
- Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3")
+ Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3")
.put("foo.bar.baz", "baz").putNull("foo.null").build();
Settings.writeSettingsToStream(settings, output);
StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes()));
@@ -703,7 +702,7 @@ public void testWriteLegacyOutput() throws IOException {
in.setVersion(output.getVersion());
Settings readSettings = Settings.readSettingsFromStream(in);
assertEquals(3, readSettings.size());
- assertArrayEquals(new String[] {"0", "1", "2", "3"}, readSettings.getAsArray("foo.bar"));
+ assertEquals(Arrays.asList("0", "1", "2", "3"), readSettings.getAsList("foo.bar"));
assertEquals(readSettings.get("foo.bar.baz"), "baz");
assertTrue(readSettings.keySet().contains("foo.null"));
assertNull(readSettings.get("foo.null"));
@@ -712,18 +711,18 @@ public void testWriteLegacyOutput() throws IOException {
public void testReadWriteArray() throws IOException {
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(randomFrom(Version.CURRENT, Version.V_6_1_0));
- Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").build();
+ Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").build();
Settings.writeSettingsToStream(settings, output);
StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes()));
Settings build = Settings.readSettingsFromStream(in);
assertEquals(2, build.size());
- assertArrayEquals(build.getAsArray("foo.bar"), new String[] {"0", "1", "2", "3"});
+ assertEquals(build.getAsList("foo.bar"), Arrays.asList("0", "1", "2", "3"));
assertEquals(build.get("foo.bar.baz"), "baz");
}
public void testCopy() {
- Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").putNull("test").build();
- assertArrayEquals(new String[] {"0", "1", "2", "3"}, Settings.builder().copy("foo.bar", settings).build().getAsArray("foo.bar"));
+ Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").putNull("test").build();
+ assertEquals(Arrays.asList("0", "1", "2", "3"), Settings.builder().copy("foo.bar", settings).build().getAsList("foo.bar"));
assertEquals("baz", Settings.builder().copy("foo.bar.baz", settings).build().get("foo.bar.baz"));
assertNull(Settings.builder().copy("foo.bar.baz", settings).build().get("test"));
assertTrue(Settings.builder().copy("test", settings).build().keySet().contains("test"));
diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java
index bee56c229c02a..e71efa46424b2 100644
--- a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java
@@ -29,7 +29,6 @@
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
-
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
@@ -215,8 +214,8 @@ public void testResponseHeaders() {
public void testCopyHeaders() {
Settings build = Settings.builder().put("request.headers.default", "1").build();
ThreadContext threadContext = new ThreadContext(build);
- threadContext.copyHeaders(Collections.emptyMap().entrySet());
- threadContext.copyHeaders(Collections.singletonMap("foo", "bar").entrySet());
+ threadContext.copyHeaders(Collections.emptyMap().entrySet());
+ threadContext.copyHeaders(Collections.singletonMap("foo", "bar").entrySet());
assertEquals("bar", threadContext.getHeader("foo"));
}
@@ -443,7 +442,7 @@ public void onAfter() {
assertEquals("bar", threadContext.getHeader("foo"));
assertEquals("bar_transient", threadContext.getTransient("foo"));
assertNotNull(threadContext.getTransient("failure"));
- assertEquals("exception from doRun", ((RuntimeException)threadContext.getTransient("failure")).getMessage());
+ assertEquals("exception from doRun", ((RuntimeException) threadContext.getTransient("failure")).getMessage());
assertFalse(threadContext.isDefaultContext());
threadContext.putTransient("after", "after");
}
@@ -604,7 +603,7 @@ protected void doRun() throws Exception {
public void testMarkAsSystemContext() throws IOException {
try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) {
assertFalse(threadContext.isSystemContext());
- try(ThreadContext.StoredContext context = threadContext.stashContext()){
+ try (ThreadContext.StoredContext context = threadContext.stashContext()) {
assertFalse(threadContext.isSystemContext());
threadContext.markAsSystemContext();
assertTrue(threadContext.isSystemContext());
@@ -613,6 +612,17 @@ public void testMarkAsSystemContext() throws IOException {
}
}
+ public void testPutHeaders() {
+ Settings build = Settings.builder().put("request.headers.default", "1").build();
+ ThreadContext threadContext = new ThreadContext(build);
+ threadContext.putHeader(Collections.emptyMap());
+ threadContext.putHeader(Collections.singletonMap("foo", "bar"));
+ assertEquals("bar", threadContext.getHeader("foo"));
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
+ threadContext.putHeader(Collections.singletonMap("foo", "boom")));
+ assertEquals("value for key [foo] already present", e.getMessage());
+ }
+
/**
* Sometimes wraps a Runnable in an AbstractRunnable.
*/
diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java
index ed7cdd4d4243d..1a837b825d867 100644
--- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java
@@ -59,7 +59,6 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
-import static java.util.Collections.singleton;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@@ -137,7 +136,7 @@ protected MockTransportService build(Settings settings, Version version) {
Settings.builder()
.put(settings)
// trace zenfd actions but keep the default otherwise
- .putArray(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), TransportLivenessAction.NAME)
+ .putList(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), TransportLivenessAction.NAME)
.build(),
new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService,
namedWriteableRegistry, new NetworkService(Collections.emptyList()), version),
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
index 0492bc82e5f73..3c7a49a176635 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
@@ -179,7 +179,7 @@ public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfil
final ClusterState stateMismatch = ClusterState.builder(new ClusterName("mismatch")).version(randomNonNegativeLong()).build();
Settings hostsSettings = Settings.builder()
- .putArray("discovery.zen.ping.unicast.hosts",
+ .putList("discovery.zen.ping.unicast.hosts",
NetworkAddress.format(new InetSocketAddress(handleA.address.address().getAddress(), handleA.address.address().getPort())),
NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort())),
NetworkAddress.format(new InetSocketAddress(handleC.address.address().getAddress(), handleC.address.address().getPort())),
@@ -305,7 +305,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi
new InetSocketAddress(handleC.address.address().getAddress(), handleC.address.address().getPort()))});
final Settings hostsSettings = Settings.builder()
- .putArray("discovery.zen.ping.unicast.hosts", "UZP_A", "UZP_B", "UZP_C")
+ .putList("discovery.zen.ping.unicast.hosts", "UZP_A", "UZP_B", "UZP_C")
.put("cluster.name", "test")
.build();
@@ -589,7 +589,7 @@ public void testResolveReuseExistingNodeConnections() throws ExecutionException,
final boolean useHosts = randomBoolean();
final Settings.Builder hostsSettingsBuilder = Settings.builder().put("cluster.name", "test");
if (useHosts) {
- hostsSettingsBuilder.putArray("discovery.zen.ping.unicast.hosts",
+ hostsSettingsBuilder.putList("discovery.zen.ping.unicast.hosts",
NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort()))
);
} else {
diff --git a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java
index 51391a8643b48..70df7d33f291c 100644
--- a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java
+++ b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java
@@ -42,7 +42,7 @@ public Environment newEnvironment(Settings settings) throws IOException {
Settings build = Settings.builder()
.put(settings)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
return new Environment(build);
}
@@ -50,7 +50,7 @@ public void testRepositoryResolution() throws IOException {
Environment environment = newEnvironment();
assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue());
assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue());
- environment = newEnvironment(Settings.builder().putArray(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build());
+ environment = newEnvironment(Settings.builder().putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build());
assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue());
assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue());
assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue());
diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
index 42cb4a5811b2e..615a75dda025a 100644
--- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
+++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
@@ -483,7 +483,7 @@ public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException
public Settings buildEnvSettings(Settings settings) {
return Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
.put(settings).build();
}
@@ -491,7 +491,7 @@ public NodeEnvironment newNodeEnvironment(String[] dataPaths, Settings settings)
Settings build = Settings.builder()
.put(settings)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
return new NodeEnvironment(build, new Environment(build));
}
@@ -500,7 +500,7 @@ public NodeEnvironment newNodeEnvironment(String[] dataPaths, String sharedDataP
.put(settings)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataPath)
- .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
return new NodeEnvironment(build, new Environment(build));
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
index 8d8878fa28251..6be786aff88b5 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
@@ -543,7 +543,7 @@ public void testQueryDefaultField() {
);
assertThat(index.getDefaultFields(), equalTo(Collections.singletonList("body")));
index.updateIndexMetaData(
- newIndexMeta("index", Settings.builder().putArray("index.query.default_field", "body", "title").build())
+ newIndexMeta("index", Settings.builder().putList("index.query.default_field", "body", "title").build())
);
assertThat(index.getDefaultFields(), equalTo(Arrays.asList("body", "title")));
}
diff --git a/core/src/test/java/org/elasticsearch/index/IndexSortIT.java b/core/src/test/java/org/elasticsearch/index/IndexSortIT.java
index bb59bc948805c..c981d88a3d1a8 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexSortIT.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexSortIT.java
@@ -26,8 +26,6 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESIntegTestCase;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
import java.io.IOException;
@@ -80,7 +78,7 @@ public void testIndexSort() {
.put(indexSettings())
.put("index.number_of_shards", "1")
.put("index.number_of_replicas", "1")
- .putArray("index.sort.field", "date", "numeric_dv", "keyword_dv")
+ .putList("index.sort.field", "date", "numeric_dv", "keyword_dv")
)
.addMapping("test", TEST_MAPPING)
.get();
@@ -99,7 +97,7 @@ public void testInvalidIndexSort() {
() -> prepareCreate("test")
.setSettings(Settings.builder()
.put(indexSettings())
- .putArray("index.sort.field", "invalid_field")
+ .putList("index.sort.field", "invalid_field")
)
.addMapping("test", TEST_MAPPING)
.get()
@@ -110,7 +108,7 @@ public void testInvalidIndexSort() {
() -> prepareCreate("test")
.setSettings(Settings.builder()
.put(indexSettings())
- .putArray("index.sort.field", "numeric")
+ .putList("index.sort.field", "numeric")
)
.addMapping("test", TEST_MAPPING)
.get()
@@ -121,7 +119,7 @@ public void testInvalidIndexSort() {
() -> prepareCreate("test")
.setSettings(Settings.builder()
.put(indexSettings())
- .putArray("index.sort.field", "keyword")
+ .putList("index.sort.field", "keyword")
)
.addMapping("test", TEST_MAPPING)
.get()
diff --git a/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
index 74ec1cc02d93f..78569d927be76 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
@@ -76,9 +76,9 @@ public void testSimpleIndexSort() throws IOException {
public void testIndexSortWithArrays() throws IOException {
Settings settings = Settings.builder()
- .putArray("index.sort.field", "field1", "field2")
- .putArray("index.sort.order", "asc", "desc")
- .putArray("index.sort.missing", "_last", "_first")
+ .putList("index.sort.field", "field1", "field2")
+ .putList("index.sort.order", "asc", "desc")
+ .putList("index.sort.missing", "_last", "_first")
.build();
IndexSettings indexSettings = indexSettings(settings);
IndexSortConfig config = indexSettings.getIndexSortConfig();
@@ -108,7 +108,7 @@ public void testInvalidIndexSort() throws IOException {
public void testInvalidIndexSortWithArray() throws IOException {
final Settings settings = Settings.builder()
.put("index.sort.field", "field1")
- .putArray("index.sort.order", new String[] {"asc", "desc"})
+ .putList("index.sort.order", new String[] {"asc", "desc"})
.build();
IllegalArgumentException exc =
expectThrows(IllegalArgumentException.class, () -> indexSettings(settings));
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java
index 9303159c265b9..d93533ffc80d3 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java
@@ -129,9 +129,9 @@ public void testConfigureCamelCaseTokenFilter() throws IOException {
.put("index.analysis.filter.testFilter.type", "mock")
.put("index.analysis.filter.test_filter.type", "mock")
.put("index.analysis.analyzer.custom_analyzer_with_camel_case.tokenizer", "standard")
- .putArray("index.analysis.analyzer.custom_analyzer_with_camel_case.filter", "lowercase", "testFilter")
+ .putList("index.analysis.analyzer.custom_analyzer_with_camel_case.filter", "lowercase", "testFilter")
.put("index.analysis.analyzer.custom_analyzer_with_snake_case.tokenizer", "standard")
- .putArray("index.analysis.analyzer.custom_analyzer_with_snake_case.filter", "lowercase", "test_filter").build();
+ .putList("index.analysis.analyzer.custom_analyzer_with_snake_case.filter", "lowercase", "test_filter").build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
@@ -209,8 +209,8 @@ public void testNoTypeOrTokenizerErrorMessage() throws IOException {
.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, version)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .putArray("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"})
- .putArray("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"})
+ .putList("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"})
+ .putList("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"})
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java
index 4073bbdbbc9c7..e07b4e5b9d435 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java
@@ -29,7 +29,6 @@
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.CharacterCodingException;
-import java.nio.charset.Charset;
import java.nio.charset.MalformedInputException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
@@ -50,7 +49,7 @@ public void testParseStemExclusion() {
assertThat(set.contains("baz"), is(false));
/* Array */
- settings = Settings.builder().putArray("stem_exclusion", "foo","bar").build();
+ settings = Settings.builder().putList("stem_exclusion", "foo","bar").build();
set = Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET);
assertThat(set.contains("foo"), is(true));
assertThat(set.contains("bar"), is(true));
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java
index 66b28ec419a7f..7d8d64e6962d5 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java
@@ -42,7 +42,7 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
public void testBasics() throws IOException {
Settings settings = Settings.builder()
- .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase")
+ .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, MOCK_ANALYSIS_PLUGIN);
@@ -57,7 +57,7 @@ public void testBasics() throws IOException {
public void testUnknownType() {
Settings settings = Settings.builder()
.put("index.analysis.normalizer.my_normalizer.type", "foobar")
- .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase", "asciifolding")
+ .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase", "asciifolding")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@@ -78,7 +78,7 @@ public void testTokenizer() throws IOException {
public void testCharFilters() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.char_filter.my_mapping.type", "mock_char_filter")
- .putArray("index.analysis.normalizer.my_normalizer.char_filter", "my_mapping")
+ .putList("index.analysis.normalizer.my_normalizer.char_filter", "my_mapping")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, MOCK_ANALYSIS_PLUGIN);
@@ -92,7 +92,7 @@ public void testCharFilters() throws IOException {
public void testIllegalFilters() throws IOException {
Settings settings = Settings.builder()
- .putArray("index.analysis.normalizer.my_normalizer.filter", "mock_forbidden")
+ .putList("index.analysis.normalizer.my_normalizer.filter", "mock_forbidden")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@@ -102,7 +102,7 @@ public void testIllegalFilters() throws IOException {
public void testIllegalCharFilters() throws IOException {
Settings settings = Settings.builder()
- .putArray("index.analysis.normalizer.my_normalizer.char_filter", "mock_forbidden")
+ .putList("index.analysis.normalizer.my_normalizer.char_filter", "mock_forbidden")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
index 41deb0bd92352..36c9dee10919f 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
@@ -83,11 +83,11 @@ public void testSynonymWordDeleteByAnalyzer() throws IOException {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonym.type", "synonym")
- .putArray("index.analysis.filter.synonym.synonyms", "kimchy => shay", "dude => elasticsearch", "abides => man!")
+ .putList("index.analysis.filter.synonym.synonyms", "kimchy => shay", "dude => elasticsearch", "abides => man!")
.put("index.analysis.filter.stop_within_synonym.type", "stop")
- .putArray("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
+ .putList("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
.put("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym","synonym")
+ .putList("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym","synonym")
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
try {
@@ -104,11 +104,11 @@ public void testExpandSynonymWordDeleteByAnalyzer() throws IOException {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonym_expand.type", "synonym")
- .putArray("index.analysis.filter.synonym_expand.synonyms", "kimchy, shay", "dude, elasticsearch", "abides, man!")
+ .putList("index.analysis.filter.synonym_expand.synonyms", "kimchy, shay", "dude, elasticsearch", "abides, man!")
.put("index.analysis.filter.stop_within_synonym.type", "stop")
- .putArray("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
+ .putList("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
.put("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym","synonym_expand")
+ .putList("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym","synonym_expand")
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
try {
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
index 3ecef3aa0f514..e67b25b051b4e 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
@@ -70,9 +70,9 @@ protected Collection> getPlugins() {
public void setup() {
indexService = createIndex("test", Settings.builder()
.put("index.analysis.normalizer.my_lowercase.type", "custom")
- .putArray("index.analysis.normalizer.my_lowercase.filter", "lowercase")
+ .putList("index.analysis.normalizer.my_lowercase.filter", "lowercase")
.put("index.analysis.normalizer.my_other_lowercase.type", "custom")
- .putArray("index.analysis.normalizer.my_other_lowercase.filter", "mock_other_lowercase").build());
+ .putList("index.analysis.normalizer.my_other_lowercase.filter", "mock_other_lowercase").build());
parser = indexService.mapperService().documentMapperParser();
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
index c29172d88afa8..94b55fba61870 100644
--- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
@@ -993,7 +993,7 @@ public void testDefaultField() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
QueryShardContext context = createShardContext();
context.getIndexSettings().updateIndexMetaData(
- newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field",
+ newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field",
STRING_FIELD_NAME, STRING_FIELD_NAME_2 + "^5").build())
);
Query query = new QueryStringQueryBuilder("hello")
@@ -1008,7 +1008,7 @@ public void testDefaultField() throws Exception {
// Reset the default value
context.getIndexSettings().updateIndexMetaData(
newIndexMeta("index",
- context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field", "*").build())
+ context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", "*").build())
);
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
index efacd3c1faba4..bfc6fd0600493 100644
--- a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
@@ -575,7 +575,7 @@ public void testDefaultField() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
QueryShardContext context = createShardContext();
context.getIndexSettings().updateIndexMetaData(
- newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field",
+ newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field",
STRING_FIELD_NAME, STRING_FIELD_NAME_2 + "^5").build())
);
Query query = new SimpleQueryStringBuilder("hello")
@@ -590,7 +590,7 @@ public void testDefaultField() throws Exception {
// Reset the default value
context.getIndexSettings().updateIndexMetaData(
newIndexMeta("index",
- context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field", "*").build())
+ context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", "*").build())
);
}
diff --git a/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java b/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java
index ec5e92ef6e376..aa154d9392574 100644
--- a/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java
@@ -52,15 +52,15 @@ public void setUp() throws Exception {
Settings.builder()
.put(indexSettings())
.put("index.analysis.filter.syns.type", "synonym")
- .putArray("index.analysis.filter.syns.synonyms", "wtf, what the fudge", "foo, bar baz")
+ .putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge", "foo, bar baz")
.put("index.analysis.analyzer.lower_syns.type", "custom")
.put("index.analysis.analyzer.lower_syns.tokenizer", "standard")
- .putArray("index.analysis.analyzer.lower_syns.filter", "lowercase", "syns")
+ .putList("index.analysis.analyzer.lower_syns.filter", "lowercase", "syns")
.put("index.analysis.filter.graphsyns.type", "synonym_graph")
- .putArray("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
+ .putList("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
.put("index.analysis.analyzer.lower_graphsyns.type", "custom")
.put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard")
- .putArray("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
+ .putList("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
);
assertAcked(builder.addMapping(INDEX, createMapping()));
diff --git a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java
index 990bfce9db8e2..70010cdfc2224 100644
--- a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java
@@ -30,7 +30,6 @@
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
@@ -47,7 +46,6 @@
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.instanceOf;
public class MultiMatchQueryTests extends ESSingleNodeTestCase {
@@ -57,7 +55,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
public void setup() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.filter.syns.type","synonym")
- .putArray("index.analysis.filter.syns.synonyms","quick,fast")
+ .putList("index.analysis.filter.syns.synonyms","quick,fast")
.put("index.analysis.analyzer.syns.tokenizer","standard")
.put("index.analysis.analyzer.syns.filter","syns").build();
IndexService indexService = createIndex("test", settings);
diff --git a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java
index fc8fc12e75d6a..c3d309b486fa0 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java
@@ -26,7 +26,6 @@
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.NodeEnvironment.NodePath;
-import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
@@ -168,7 +167,7 @@ public void testSelectNewPathForShard() throws Exception {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), path)
- .putArray(Environment.PATH_DATA_SETTING.getKey(), paths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
NodeEnvironment nodeEnv = new NodeEnvironment(settings, new Environment(settings));
// Make sure all our mocking above actually worked:
diff --git a/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java b/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java
index 0a72037b7d8c0..24ce9b487cc24 100644
--- a/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java
@@ -21,9 +21,7 @@
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FileSwitchDirectory;
import org.apache.lucene.store.MMapDirectory;
-import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.store.SleepingLockWrapper;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings;
@@ -48,7 +46,7 @@ public void testPreload() throws IOException {
private void doTestPreload(String...preload) throws IOException {
Settings build = Settings.builder()
.put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "mmapfs")
- .putArray(IndexModule.INDEX_STORE_PRE_LOAD_SETTING.getKey(), preload)
+ .putList(IndexModule.INDEX_STORE_PRE_LOAD_SETTING.getKey(), preload)
.build();
IndexSettings settings = IndexSettingsModule.newIndexSettings("foo", build);
IndexStore store = new IndexStore(settings);
diff --git a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java
index c2b394b219a20..b0d4c238679e8 100644
--- a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java
+++ b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java
@@ -33,6 +33,7 @@
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
+import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
@@ -210,7 +211,10 @@ public void testCorruptTranslogTruncation() throws Exception {
logger.info("--> starting the replica node to test recovery");
internalCluster().startNode();
ensureGreen("test");
- assertHitCount(client().prepareSearch("test").setPreference("_replica").setQuery(matchAllQuery()).get(), numDocsToKeep);
+ for (String node : internalCluster().nodesInclude("test")) {
+ SearchRequestBuilder q = client().prepareSearch("test").setPreference("_only_nodes:" + node).setQuery(matchAllQuery());
+ assertHitCount(q.get(), numDocsToKeep);
+ }
final RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("test").setActiveOnly(false).get();
final RecoveryState replicaRecoveryState = recoveryResponse.shardRecoveryStates().get("test").stream()
.filter(recoveryState -> recoveryState.getPrimary() == false).findFirst().get();
@@ -308,7 +312,9 @@ public void testCorruptTranslogTruncationOfReplica() throws Exception {
logger.info("--> starting the replica node to test recovery");
internalCluster().startNode();
ensureGreen("test");
- assertHitCount(client().prepareSearch("test").setPreference("_replica").setQuery(matchAllQuery()).get(), totalDocs);
+ for (String node : internalCluster().nodesInclude("test")) {
+ assertHitCount(client().prepareSearch("test").setPreference("_only_nodes:" + node).setQuery(matchAllQuery()).get(), totalDocs);
+ }
final RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("test").setActiveOnly(false).get();
final RecoveryState replicaRecoveryState = recoveryResponse.shardRecoveryStates().get("test").stream()
diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
index d53dba67e0dc4..9f214082d4b22 100644
--- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
@@ -117,9 +117,9 @@ public void testAnalyzeWithNonDefaultPostionLength() throws Exception {
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
.setSettings(Settings.builder().put(indexSettings())
.put("index.analysis.filter.syns.type", "synonym")
- .putArray("index.analysis.filter.syns.synonyms", "wtf, what the fudge")
+ .putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge")
.put("index.analysis.analyzer.custom_syns.tokenizer", "standard")
- .putArray("index.analysis.analyzer.custom_syns.filter", "lowercase", "syns")));
+ .putList("index.analysis.analyzer.custom_syns.filter", "lowercase", "syns")));
ensureGreen();
AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("say what the fudge").setIndex("test").setAnalyzer("custom_syns").get();
@@ -446,7 +446,7 @@ public void testAnalyzeNormalizedKeywordField() throws IOException {
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
.setSettings(Settings.builder().put(indexSettings())
.put("index.analysis.normalizer.my_normalizer.type", "custom")
- .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase"))
+ .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase"))
.addMapping("test", "keyword", "type=keyword,normalizer=my_normalizer"));
ensureGreen("test");
diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java
index 185f27d39c8fe..bf213b51475fb 100644
--- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java
@@ -406,8 +406,7 @@ public void onFailure(Exception e) {
}
});
- // Wait for document to be indexed on primary
- assertBusy(() -> assertTrue(client().prepareGet("index", "type", "1").setPreference("_primary").get().isExists()));
+ assertBusy(() -> assertTrue(client().prepareGet("index", "type", "1").get().isExists()));
// The mappings have not been propagated to the replica yet as a consequence the document count not be indexed
// We wait on purpose to make sure that the document is not indexed because the shard operation is stalled
diff --git a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
index 1a357c55eb056..d9eb45013263d 100644
--- a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
+++ b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
@@ -19,8 +19,6 @@
package org.elasticsearch.search;
-import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.NumericDocValues;
@@ -41,7 +39,6 @@
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
-import org.elasticsearch.search.MultiValueMode.UnsortedNumericDoubleValues;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@@ -92,7 +89,7 @@ public void testSingleValuedLongs() throws Exception {
docsWithValue.set(i);
}
}
-
+
final Supplier multiValues = () -> DocValues.singleton(new AbstractNumericDocValues() {
int docId = -1;
@Override
@@ -161,6 +158,8 @@ private void verifySortedNumeric(Supplier supplier, int
for (int i = 0; i < maxDoc; ++i) {
assertTrue(selected.advanceExact(i));
final long actual = selected.longValue();
+ verifyLongValueCanCalledMoreThanOnce(selected, actual);
+
long expected = 0;
if (values.advanceExact(i) == false) {
expected = missingValue;
@@ -204,6 +203,12 @@ private void verifySortedNumeric(Supplier supplier, int
}
}
+ private void verifyLongValueCanCalledMoreThanOnce(NumericDocValues values, long expected) throws IOException {
+ for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) {
+ assertEquals(expected, values.longValue());
+ }
+ }
+
private void verifySortedNumeric(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException {
for (long missingValue : new long[] { 0, randomLong() }) {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) {
@@ -213,6 +218,8 @@ private void verifySortedNumeric(Supplier supplier, int
for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) {
assertTrue(selected.advanceExact(root));
final long actual = selected.longValue();
+ verifyLongValueCanCalledMoreThanOnce(selected, actual);
+
long expected = 0;
if (mode == MultiValueMode.MAX) {
expected = Long.MIN_VALUE;
@@ -321,14 +328,13 @@ public int docValueCount() {
private void verifySortedNumericDouble(Supplier supplier, int maxDoc) throws IOException {
for (long missingValue : new long[] { 0, randomLong() }) {
for (MultiValueMode mode : MultiValueMode.values()) {
- if (MultiValueMode.MEDIAN.equals(mode)) {
- continue;
- }
SortedNumericDoubleValues values = supplier.get();
final NumericDoubleValues selected = mode.select(values, missingValue);
for (int i = 0; i < maxDoc; ++i) {
assertTrue(selected.advanceExact(i));
final double actual = selected.doubleValue();
+ verifyDoubleValueCanCalledMoreThanOnce(selected, actual);
+
double expected = 0.0;
if (values.advanceExact(i) == false) {
expected = missingValue;
@@ -372,6 +378,12 @@ private void verifySortedNumericDouble(Supplier suppl
}
}
+ private void verifyDoubleValueCanCalledMoreThanOnce(NumericDoubleValues values, double expected) throws IOException {
+ for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) {
+ assertTrue(Double.compare(values.doubleValue(), expected) == 0);
+ }
+ }
+
private void verifySortedNumericDouble(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException {
for (long missingValue : new long[] { 0, randomLong() }) {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) {
@@ -380,7 +392,9 @@ private void verifySortedNumericDouble(Supplier suppl
int prevRoot = -1;
for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) {
assertTrue(selected.advanceExact(root));
- final double actual = selected.doubleValue();;
+ final double actual = selected.doubleValue();
+ verifyDoubleValueCanCalledMoreThanOnce(selected, actual);
+
double expected = 0.0;
if (mode == MultiValueMode.MAX) {
expected = Long.MIN_VALUE;
@@ -422,7 +436,7 @@ public void testSingleValuedStrings() throws Exception {
final FixedBitSet docsWithValue = randomBoolean() ? null : new FixedBitSet(numDocs);
for (int i = 0; i < array.length; ++i) {
if (randomBoolean()) {
- array[i] = new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8));
+ array[i] = new BytesRef(randomAlphaOfLengthBetween(8, 8));
if (docsWithValue != null) {
docsWithValue.set(i);
}
@@ -457,7 +471,7 @@ public void testMultiValuedStrings() throws Exception {
for (int i = 0; i < numDocs; ++i) {
final BytesRef[] values = new BytesRef[randomInt(4)];
for (int j = 0; j < values.length; ++j) {
- values[j] = new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8));
+ values[j] = new BytesRef(randomAlphaOfLengthBetween(8, 8));
}
Arrays.sort(values);
array[i] = values;
@@ -490,13 +504,15 @@ public int docValueCount() {
}
private void verifySortedBinary(Supplier supplier, int maxDoc) throws IOException {
- for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)) }) {
+ for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(randomAlphaOfLengthBetween(8, 8)) }) {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) {
SortedBinaryDocValues values = supplier.get();
final BinaryDocValues selected = mode.select(values, missingValue);
for (int i = 0; i < maxDoc; ++i) {
assertTrue(selected.advanceExact(i));
final BytesRef actual = selected.binaryValue();
+ verifyBinaryValueCanCalledMoreThanOnce(selected, actual);
+
BytesRef expected = null;
if (values.advanceExact(i) == false) {
expected = missingValue;
@@ -525,8 +541,14 @@ private void verifySortedBinary(Supplier supplier, int ma
}
}
+ private void verifyBinaryValueCanCalledMoreThanOnce(BinaryDocValues values, BytesRef expected) throws IOException {
+ for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) {
+ assertEquals(values.binaryValue(), expected);
+ }
+ }
+
private void verifySortedBinary(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException {
- for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)) }) {
+ for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(randomAlphaOfLengthBetween(8, 8)) }) {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) {
SortedBinaryDocValues values = supplier.get();
final BinaryDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc);
@@ -534,6 +556,8 @@ private void verifySortedBinary(Supplier supplier, int ma
for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) {
assertTrue(selected.advanceExact(root));
final BytesRef actual = selected.binaryValue();
+ verifyBinaryValueCanCalledMoreThanOnce(selected, actual);
+
BytesRef expected = null;
for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) {
if (values.advanceExact(child)) {
@@ -659,7 +683,11 @@ private void verifySortedSet(Supplier supplier, int maxDoc)
SortedSetDocValues values = supplier.get();
final SortedDocValues selected = mode.select(values);
for (int i = 0; i < maxDoc; ++i) {
- final long actual = selected.advanceExact(i) ? selected.ordValue() : -1;
+ long actual = -1;
+ if (selected.advanceExact(i)) {
+ actual = selected.ordValue();
+ verifyOrdValueCanCalledMoreThanOnce(selected, selected.ordValue());
+ }
int expected = -1;
if (values.advanceExact(i)) {
for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) {
@@ -680,13 +708,23 @@ private void verifySortedSet(Supplier supplier, int maxDoc)
}
}
+ private void verifyOrdValueCanCalledMoreThanOnce(SortedDocValues values, long expected) throws IOException {
+ for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) {
+ assertEquals(values.ordValue(), expected);
+ }
+ }
+
private void verifySortedSet(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) {
SortedSetDocValues values = supplier.get();
final SortedDocValues selected = mode.select(values, rootDocs, new BitSetIterator(innerDocs, 0L));
int prevRoot = -1;
for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) {
- final int actual = selected.advanceExact(root) ? selected.ordValue() : -1;
+ int actual = -1;
+ if (selected.advanceExact(root)) {
+ actual = selected.ordValue();
+ verifyOrdValueCanCalledMoreThanOnce(selected, actual);
+ }
int expected = -1;
for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) {
if (values.advanceExact(child)) {
@@ -711,126 +749,6 @@ private void verifySortedSet(Supplier supplier, int maxDoc,
}
}
- public void testUnsortedSingleValuedDoubles() throws Exception {
- final int numDocs = scaledRandomIntBetween(1, 100);
- final double[] array = new double[numDocs];
- final FixedBitSet docsWithValue = randomBoolean() ? null : new FixedBitSet(numDocs);
- for (int i = 0; i < array.length; ++i) {
- if (randomBoolean()) {
- array[i] = randomDouble();
- if (docsWithValue != null) {
- docsWithValue.set(i);
- }
- } else if (docsWithValue != null && randomBoolean()) {
- docsWithValue.set(i);
- }
- }
- final NumericDoubleValues singleValues = new NumericDoubleValues() {
- private int docID;
- @Override
- public boolean advanceExact(int doc) throws IOException {
- docID = doc;
- return docsWithValue == null || docsWithValue.get(docID);
- }
- @Override
- public double doubleValue() {
- return array[docID];
- }
- };
- final SortedNumericDoubleValues singletonValues = FieldData.singleton(singleValues);
- final MultiValueMode.UnsortedNumericDoubleValues multiValues = new MultiValueMode.UnsortedNumericDoubleValues() {
-
- @Override
- public int docValueCount() {
- return singletonValues.docValueCount();
- }
-
- @Override
- public boolean advanceExact(int doc) throws IOException {
- return singletonValues.advanceExact(doc);
- }
-
- @Override
- public double nextValue() throws IOException {
- return Math.cos(singletonValues.nextValue());
- }
- };
- verifyUnsortedNumeric(() -> multiValues, numDocs);
- }
-
- public void testUnsortedMultiValuedDoubles() throws Exception {
- final int numDocs = scaledRandomIntBetween(1, 100);
- final double[][] array = new double[numDocs][];
- for (int i = 0; i < numDocs; ++i) {
- final double[] values = new double[randomInt(4)];
- for (int j = 0; j < values.length; ++j) {
- values[j] = randomDouble();
- }
- Arrays.sort(values);
- array[i] = values;
- }
- final MultiValueMode.UnsortedNumericDoubleValues multiValues = new MultiValueMode.UnsortedNumericDoubleValues() {
- int doc;
- int i;
-
- @Override
- public int docValueCount() {
- return array[doc].length;
- }
-
- @Override
- public boolean advanceExact(int doc) {
- this.doc = doc;
- i = 0;
- return array[doc].length > 0;
- }
-
- @Override
- public double nextValue() {
- return Math.sin(array[doc][i++]);
- }
- };
- verifyUnsortedNumeric(() -> multiValues, numDocs);
- }
-
- private void verifyUnsortedNumeric(Supplier supplier, int maxDoc) throws IOException {
- for (double missingValue : new double[] { 0, randomDouble() }) {
- for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) {
- UnsortedNumericDoubleValues values = supplier.get();
- final NumericDoubleValues selected = mode.select(values, missingValue);
- for (int i = 0; i < maxDoc; ++i) {
- assertTrue(selected.advanceExact(i));
- final double actual = selected.doubleValue();
- double expected = 0.0;
- if (values.advanceExact(i) == false) {
- expected = missingValue;
- } else {
- int numValues = values.docValueCount();
- if (mode == MultiValueMode.MAX) {
- expected = Long.MIN_VALUE;
- } else if (mode == MultiValueMode.MIN) {
- expected = Long.MAX_VALUE;
- }
- for (int j = 0; j < numValues; ++j) {
- if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) {
- expected += values.nextValue();
- } else if (mode == MultiValueMode.MIN) {
- expected = Math.min(expected, values.nextValue());
- } else if (mode == MultiValueMode.MAX) {
- expected = Math.max(expected, values.nextValue());
- }
- }
- if (mode == MultiValueMode.AVG) {
- expected = expected/numValues;
- }
- }
-
- assertEquals(mode.toString() + " docId=" + i, expected, actual, 0.1);
- }
- }
- }
- }
-
public void testValidOrdinals() {
assertThat(MultiValueMode.SUM.ordinal(), equalTo(0));
assertThat(MultiValueMode.AVG.ordinal(), equalTo(1));
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java
index af826a7d7900e..8c383e799fee5 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java
@@ -23,12 +23,15 @@
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.BucketOrder;
+import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -63,6 +66,27 @@ protected InternalHistogram createTestInstance(String name,
return new InternalHistogram(name, buckets, order, 1, null, format, keyed, pipelineAggregators, metaData);
}
+ // issue 26787
+ public void testHandlesNaN() {
+ InternalHistogram histogram = createTestInstance();
+ InternalHistogram histogram2 = createTestInstance();
+ List buckets = histogram.getBuckets();
+ if (buckets == null || buckets.isEmpty()) {
+ return;
+ }
+
+ // Set the key of one bucket to NaN. Must be the last bucket because NaN is greater than everything else.
+ List newBuckets = new ArrayList<>(buckets.size());
+ if (buckets.size() > 1) {
+ newBuckets.addAll(buckets.subList(0, buckets.size() - 1));
+ }
+ InternalHistogram.Bucket b = buckets.get(buckets.size() - 1);
+ newBuckets.add(new InternalHistogram.Bucket(Double.NaN, b.docCount, keyed, b.format, b.aggregations));
+
+ InternalHistogram newHistogram = histogram.create(newBuckets);
+ newHistogram.doReduce(Arrays.asList(newHistogram, histogram2), new InternalAggregation.ReduceContext(null, null, false));
+ }
+
@Override
protected void assertReduced(InternalHistogram reduced, List inputs) {
Map expectedCounts = new TreeMap<>();
diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java
index 1ed396672b5d0..4748b6292c417 100644
--- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java
+++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java
@@ -75,8 +75,8 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas)
// we want to make sure that while recovery happens, and a replica gets recovered, its properly refreshed
ClusterHealthStatus status = client().admin().cluster().prepareHealth("test").get().getStatus();;
while (status != ClusterHealthStatus.GREEN) {
- // first, verify that search on the primary search works
- SearchResponse searchResponse = client().prepareSearch("test").setPreference("_primary").setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
+            // first, verify that a normal search works
+ SearchResponse searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
assertHitCount(searchResponse, 1);
Client client = client();
searchResponse = client.prepareSearch("test").setPreference(preference + Integer.toString(counter++)).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
@@ -93,8 +93,6 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas)
status = client().admin().cluster().prepareHealth("test").get().getStatus();
internalCluster().ensureAtLeastNumDataNodes(numberOfReplicas + 1);
}
- SearchResponse searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
- assertHitCount(searchResponse, 1);
cluster().wipeIndices("test");
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java
index 763518804e277..761f9798d7286 100644
--- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java
+++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java
@@ -301,7 +301,6 @@ public void testMatchedWithShould() throws Exception {
.should(queryStringQuery("dolor").queryName("dolor"))
.should(queryStringQuery("elit").queryName("elit"))
)
- .setPreference("_primary")
.get();
assertHitCount(searchResponse, 2L);
diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index 34aa2ab3117aa..faf1f65f34bda 100644
--- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -1358,9 +1358,9 @@ public void testPhrasePrefix() throws IOException {
Builder builder = Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.synonym.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
+ .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
.put("index.analysis.filter.synonym.type", "synonym")
- .putArray("index.analysis.filter.synonym.synonyms", "quick => fast");
+ .putList("index.analysis.filter.synonym.synonyms", "quick => fast");
assertAcked(prepareCreate("first_test_index").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping()));
@@ -2773,9 +2773,9 @@ public void testSynonyms() throws IOException {
Builder builder = Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.synonym.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
+ .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
.put("index.analysis.filter.synonym.type", "synonym")
- .putArray("index.analysis.filter.synonym.synonyms", "fast,quick");
+ .putList("index.analysis.filter.synonym.synonyms", "fast,quick");
assertAcked(prepareCreate("test").setSettings(builder.build())
.addMapping("type1", "field1",
diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
index ab23dfbe21928..58565b5f264b7 100644
--- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
+++ b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
@@ -38,9 +38,7 @@
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.rescore.QueryRescoreMode;
import org.elasticsearch.search.rescore.QueryRescorerBuilder;
-import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
-import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Arrays;
@@ -159,9 +157,9 @@ public void testRescorePhrase() throws Exception {
public void testMoreDocs() throws Exception {
Builder builder = Settings.builder();
builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
- builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
+ builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
builder.put("index.analysis.filter.synonym.type", "synonym");
- builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
+ builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
@@ -237,9 +235,9 @@ public void testMoreDocs() throws Exception {
public void testSmallRescoreWindow() throws Exception {
Builder builder = Settings.builder();
builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
- builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
+ builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
builder.put("index.analysis.filter.synonym.type", "synonym");
- builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
+ builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
@@ -309,9 +307,9 @@ public void testSmallRescoreWindow() throws Exception {
public void testRescorerMadeScoresWorse() throws Exception {
Builder builder = Settings.builder();
builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
- builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
+ builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
builder.put("index.analysis.filter.synonym.type", "synonym");
- builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
+ builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
index 31366c2534cb2..257089c90545f 100644
--- a/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
+++ b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
@@ -107,7 +107,7 @@ public void testConsistentHitsWithSameSeed() throws Exception {
for (int o = 0; o < outerIters; o++) {
final int seed = randomInt();
String preference = randomRealisticUnicodeOfLengthBetween(1, 10); // at least one char!!
- // randomPreference should not start with '_' (reserved for known preference types (e.g. _shards, _primary)
+            // randomPreference should not start with '_' (reserved for known preference types, e.g. _shards)
while (preference.startsWith("_")) {
preference = randomRealisticUnicodeOfLengthBetween(1, 10);
}
diff --git a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java
index 6478446a1a254..8cbb626b6770e 100644
--- a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java
+++ b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java
@@ -44,7 +44,6 @@
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.not;
-import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@@ -67,7 +66,7 @@ public void testStopOneNodePreferenceWithRedState() throws InterruptedException,
refresh();
internalCluster().stopRandomDataNode();
client().admin().cluster().prepareHealth().setWaitForStatus(ClusterHealthStatus.RED).execute().actionGet();
- String[] preferences = new String[] {"_primary", "_local", "_primary_first", "_prefer_nodes:somenode", "_prefer_nodes:server2", "_prefer_nodes:somenode,server2"};
+ String[] preferences = new String[]{"_local", "_prefer_nodes:somenode", "_prefer_nodes:server2", "_prefer_nodes:somenode,server2"};
for (String pref : preferences) {
logger.info("--> Testing out preference={}", pref);
SearchResponse searchResponse = client().prepareSearch().setSize(0).setPreference(pref).execute().actionGet();
@@ -113,54 +112,14 @@ public void testSimplePreference() throws Exception {
client().prepareIndex("test", "type1").setSource("field1", "value1").execute().actionGet();
refresh();
- SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_local").execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
- searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_local").execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
-
- searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_primary").execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
- searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_primary").execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
-
- searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
- searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet();
+ SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
- searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica_first").execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
- searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica_first").execute().actionGet();
+ searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_local").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("1234").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
- searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("1234").execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
- }
-
- public void testReplicaPreference() throws Exception {
- client().admin().indices().prepareCreate("test").setSettings("{\"number_of_replicas\": 0}", XContentType.JSON).get();
- ensureGreen();
-
- client().prepareIndex("test", "type1").setSource("field1", "value1").execute().actionGet();
- refresh();
-
- try {
- client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet();
- fail("should have failed because there are no replicas");
- } catch (Exception e) {
- // pass
- }
-
- SearchResponse resp = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica_first").execute().actionGet();
- assertThat(resp.getHits().getTotalHits(), equalTo(1L));
-
- client().admin().indices().prepareUpdateSettings("test").setSettings("{\"number_of_replicas\": 1}", XContentType.JSON).get();
- ensureGreen("test");
-
- resp = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet();
- assertThat(resp.getHits().getTotalHits(), equalTo(1L));
}
public void testThatSpecifyingNonExistingNodesReturnsUsefulError() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java
index d5198485351b1..14378fdb1c8a9 100644
--- a/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java
+++ b/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java
@@ -134,14 +134,12 @@ public void testProfileMatchesRegular() throws Exception {
.setQuery(q)
.setProfile(false)
.addSort("_id", SortOrder.ASC)
- .setPreference("_primary")
.setSearchType(SearchType.QUERY_THEN_FETCH);
SearchRequestBuilder profile = client().prepareSearch("test")
.setQuery(q)
.setProfile(true)
.addSort("_id", SortOrder.ASC)
- .setPreference("_primary")
.setSearchType(SearchType.QUERY_THEN_FETCH);
MultiSearchResponse.Item[] responses = client().prepareMultiSearch()
diff --git a/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java
index ac72fc6fcb96d..ab8bcb539d6ae 100644
--- a/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java
@@ -19,7 +19,6 @@
package org.elasticsearch.search.query;
-import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
@@ -52,7 +51,6 @@
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
@@ -266,10 +264,10 @@ private void setupIndexWithGraph(String index) throws Exception {
Settings.builder()
.put(indexSettings())
.put("index.analysis.filter.graphsyns.type", "synonym_graph")
- .putArray("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
+ .putList("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
.put("index.analysis.analyzer.lower_graphsyns.type", "custom")
.put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard")
- .putArray("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
+ .putList("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject(index).startObject("properties")
diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
index 9f7aaff4489d8..3ad7a83ef19db 100644
--- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
@@ -349,7 +349,7 @@ public void testCommonTermsQueryStackedTokens() throws Exception {
.put(indexSettings())
.put(SETTING_NUMBER_OF_SHARDS,1)
.put("index.analysis.filter.syns.type","synonym")
- .putArray("index.analysis.filter.syns.synonyms","quick,fast")
+ .putList("index.analysis.filter.syns.synonyms","quick,fast")
.put("index.analysis.analyzer.syns.tokenizer","whitespace")
.put("index.analysis.analyzer.syns.filter","syns")
)
@@ -1572,9 +1572,9 @@ public void testMatchQueryWithSynonyms() throws IOException {
.put("index.analysis.analyzer.index.filter", "lowercase")
.put("index.analysis.analyzer.search.type", "custom")
.put("index.analysis.analyzer.search.tokenizer", "standard")
- .putArray("index.analysis.analyzer.search.filter", "lowercase", "synonym")
+ .putList("index.analysis.analyzer.search.filter", "lowercase", "synonym")
.put("index.analysis.filter.synonym.type", "synonym")
- .putArray("index.analysis.filter.synonym.synonyms", "fast, quick"));
+ .putList("index.analysis.filter.synonym.synonyms", "fast, quick"));
assertAcked(builder.addMapping("test", "text", "type=text,analyzer=index,search_analyzer=search"));
client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get();
@@ -1602,9 +1602,9 @@ public void testQueryStringWithSynonyms() throws IOException {
.put("index.analysis.analyzer.index.filter", "lowercase")
.put("index.analysis.analyzer.search.type", "custom")
.put("index.analysis.analyzer.search.tokenizer", "standard")
- .putArray("index.analysis.analyzer.search.filter", "lowercase", "synonym")
+ .putList("index.analysis.analyzer.search.filter", "lowercase", "synonym")
.put("index.analysis.filter.synonym.type", "synonym")
- .putArray("index.analysis.filter.synonym.synonyms", "fast, quick"));
+ .putList("index.analysis.filter.synonym.synonyms", "fast, quick"));
assertAcked(builder.addMapping("test", "text", "type=text,analyzer=index,search_analyzer=search"));
client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get();
@@ -1807,7 +1807,7 @@ public void testNGramCopyField() {
.put("index.analysis.tokenizer.my_ngram_tokenizer.type", "nGram")
.put("index.analysis.tokenizer.my_ngram_tokenizer.min_gram", "1")
.put("index.analysis.tokenizer.my_ngram_tokenizer.max_gram", "10")
- .putArray("index.analysis.tokenizer.my_ngram_tokenizer.token_chars", new String[0]));
+ .putList("index.analysis.tokenizer.my_ngram_tokenizer.token_chars", new String[0]));
assertAcked(builder.addMapping("test", "origin", "type=text,copy_to=meta", "meta", "type=text,analyzer=my_ngram_analyzer"));
// we only have ngrams as the index analyzer so searches will get standard analyzer
diff --git a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java
index 88c403a1d7fb5..9eacb0e81bd29 100644
--- a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java
@@ -79,7 +79,7 @@ public void testSearchRandomPreference() throws InterruptedException, ExecutionE
int iters = scaledRandomIntBetween(10, 20);
for (int i = 0; i < iters; i++) {
String randomPreference = randomUnicodeOfLengthBetween(0, 4);
- // randomPreference should not start with '_' (reserved for known preference types (e.g. _shards, _primary)
+ // randomPreference should not start with '_' (reserved for known preference types (e.g. _shards))
while (randomPreference.startsWith("_")) {
randomPreference = randomUnicodeOfLengthBetween(0, 4);
}
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
index 3cbee6adc4161..01b16bb9fb698 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
@@ -528,9 +528,9 @@ public void testThatSynonymsWork() throws Exception {
Settings.Builder settingsBuilder = Settings.builder()
.put("analysis.analyzer.suggest_analyzer_synonyms.type", "custom")
.put("analysis.analyzer.suggest_analyzer_synonyms.tokenizer", "standard")
- .putArray("analysis.analyzer.suggest_analyzer_synonyms.filter", "standard", "lowercase", "my_synonyms")
+ .putList("analysis.analyzer.suggest_analyzer_synonyms.filter", "standard", "lowercase", "my_synonyms")
.put("analysis.filter.my_synonyms.type", "synonym")
- .putArray("analysis.filter.my_synonyms.synonyms", "foo,renamed");
+ .putList("analysis.filter.my_synonyms.synonyms", "foo,renamed");
completionMappingBuilder.searchAnalyzer("suggest_analyzer_synonyms").indexAnalyzer("suggest_analyzer_synonyms");
createIndexAndMappingAndSettings(settingsBuilder.build(), completionMappingBuilder);
@@ -806,7 +806,7 @@ public void testThatSortingOnCompletionFieldReturnsUsefulException() throws Exce
public void testThatSuggestStopFilterWorks() throws Exception {
Settings.Builder settingsBuilder = Settings.builder()
.put("index.analysis.analyzer.stoptest.tokenizer", "standard")
- .putArray("index.analysis.analyzer.stoptest.filter", "standard", "suggest_stop_filter")
+ .putList("index.analysis.analyzer.stoptest.filter", "standard", "suggest_stop_filter")
.put("index.analysis.filter.suggest_stop_filter.type", "stop")
.put("index.analysis.filter.suggest_stop_filter.remove_trailing", false);
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
index 541cafc7962b2..b0b655b0f8b2a 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
@@ -30,7 +30,6 @@
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
-import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.TemplateScript;
@@ -173,7 +172,7 @@ public void testSuggestModes() throws IOException {
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put(SETTING_NUMBER_OF_REPLICAS, 0)
.put("index.analysis.analyzer.biword.tokenizer", "standard")
- .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
+ .putList("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
.put("index.analysis.filter.shingler.type", "shingle")
.put("index.analysis.filter.shingler.min_shingle_size", 2)
.put("index.analysis.filter.shingler.max_shingle_size", 3));
@@ -253,7 +252,7 @@ public void testUnmappedField() throws IOException, InterruptedException, Execut
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.biword.tokenizer", "standard")
- .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
+ .putList("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
.put("index.analysis.filter.shingler.type", "shingle")
.put("index.analysis.filter.shingler.min_shingle_size", 2)
.put("index.analysis.filter.shingler.max_shingle_size", 3));
@@ -427,7 +426,7 @@ public void testStopwordsOnlyPhraseSuggest() throws IOException {
assertAcked(prepareCreate("test").addMapping("typ1", "body", "type=text,analyzer=stopwd").setSettings(
Settings.builder()
.put("index.analysis.analyzer.stopwd.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.stopwd.filter", "stop")
+ .putList("index.analysis.analyzer.stopwd.filter", "stop")
));
ensureGreen();
index("test", "typ1", "1", "body", "this is a test");
@@ -444,9 +443,9 @@ public void testPrefixLength() throws IOException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase")
+ .putList("index.analysis.analyzer.body.filter", "lowercase")
.put("index.analysis.analyzer.bigram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
+ .putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", false)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
@@ -482,9 +481,9 @@ public void testBasicPhraseSuggest() throws IOException, URISyntaxException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase")
+ .putList("index.analysis.analyzer.body.filter", "lowercase")
.put("index.analysis.analyzer.bigram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
+ .putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", false)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
@@ -615,9 +614,9 @@ public void testSizeParam() throws IOException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase")
+ .putList("index.analysis.analyzer.body.filter", "lowercase")
.put("index.analysis.analyzer.bigram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
+ .putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", false)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
@@ -685,7 +684,7 @@ public void testShardFailures() throws IOException, InterruptedException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.suggest.tokenizer", "standard")
- .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
+ .putList("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
.put("index.analysis.filter.shingler.type", "shingle")
.put("index.analysis.filter.shingler.min_shingle_size", 2)
.put("index.analysis.filter.shingler.max_shingle_size", 5)
@@ -745,7 +744,7 @@ public void testEmptyShards() throws IOException, InterruptedException {
assertAcked(prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.suggest.tokenizer", "standard")
- .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
+ .putList("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
.put("index.analysis.filter.shingler.type", "shingle")
.put("index.analysis.filter.shingler.min_shingle_size", 2)
.put("index.analysis.filter.shingler.max_shingle_size", 5)
@@ -781,7 +780,7 @@ public void testSearchForRarePhrase() throws IOException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase", "my_shingle")
+ .putList("index.analysis.analyzer.body.filter", "lowercase", "my_shingle")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", true)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
@@ -836,7 +835,7 @@ public void testSuggestWithManyCandidates() throws InterruptedException, Executi
.put(indexSettings())
.put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
.put("index.analysis.analyzer.text.tokenizer", "standard")
- .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
+ .putList("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", true)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
@@ -1026,7 +1025,7 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE
.put(indexSettings())
.put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
.put("index.analysis.analyzer.text.tokenizer", "standard")
- .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
+ .putList("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", true)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
index 5883d3a5645cd..a5e92d89906cc 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -1827,7 +1827,7 @@ public void testChangeSettingsOnRestore() throws Exception {
.put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s")
.put("index.analysis.analyzer.my_analyzer.type", "custom")
.put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
- .putArray("index.analysis.analyzer.my_analyzer.filter", "lowercase", "my_synonym")
+ .putList("index.analysis.analyzer.my_analyzer.filter", "lowercase", "my_synonym")
.put("index.analysis.filter.my_synonym.type", "synonym")
.put("index.analysis.filter.my_synonym.synonyms", "foo => bar");
diff --git a/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
index aa4c7415a4c45..8e0c039176207 100644
--- a/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
@@ -125,8 +125,8 @@ public void testGroupClusterIndices() throws IOException {
transportService.start();
transportService.acceptIncomingRequests();
Settings.Builder builder = Settings.builder();
- builder.putArray("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
- builder.putArray("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
+ builder.putList("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
+ builder.putList("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) {
assertFalse(service.isCrossClusterSearchEnabled());
service.initializeRemoteClusters();
@@ -171,8 +171,8 @@ public void testIncrementallyAddClusters() throws IOException {
transportService.start();
transportService.acceptIncomingRequests();
Settings.Builder builder = Settings.builder();
- builder.putArray("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
- builder.putArray("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
+ builder.putList("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
+ builder.putList("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, transportService)) {
assertFalse(service.isCrossClusterSearchEnabled());
service.initializeRemoteClusters();
@@ -225,9 +225,9 @@ public void testRemoteNodeAttribute() throws IOException, InterruptedException {
transportService.start();
transportService.acceptIncomingRequests();
final Settings.Builder builder = Settings.builder();
- builder.putArray(
+ builder.putList(
"search.remote.cluster_1.seeds", c1N1Node.getAddress().toString());
- builder.putArray(
+ builder.putList(
"search.remote.cluster_2.seeds", c2N1Node.getAddress().toString());
try (RemoteClusterService service =
new RemoteClusterService(settings, transportService)) {
@@ -302,9 +302,9 @@ public void testCollectNodes() throws InterruptedException, IOException {
transportService.start();
transportService.acceptIncomingRequests();
final Settings.Builder builder = Settings.builder();
- builder.putArray(
+ builder.putList(
"search.remote.cluster_1.seeds", c1N1Node.getAddress().toString());
- builder.putArray(
+ builder.putList(
"search.remote.cluster_2.seeds", c2N1Node.getAddress().toString());
try (RemoteClusterService service =
new RemoteClusterService(settings, transportService)) {
diff --git a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
index 6f8cbb6a222d2..a87f428fec51e 100644
--- a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
@@ -180,9 +180,9 @@ public void testExplainMatchPhrasePrefix() {
assertAcked(prepareCreate("test").setSettings(
Settings.builder().put(indexSettings())
.put("index.analysis.filter.syns.type", "synonym")
- .putArray("index.analysis.filter.syns.synonyms", "one,two")
+ .putList("index.analysis.filter.syns.synonyms", "one,two")
.put("index.analysis.analyzer.syns.tokenizer", "standard")
- .putArray("index.analysis.analyzer.syns.filter", "syns")
+ .putList("index.analysis.analyzer.syns.filter", "syns")
).addMapping("test", "field","type=text,analyzer=syns"));
ensureGreen();
diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle
index f2c4a09edd4d8..c84c6a6d5a227 100644
--- a/distribution/bwc/build.gradle
+++ b/distribution/bwc/build.gradle
@@ -63,42 +63,44 @@ if (enabled) {
}
File checkoutDir = file("${buildDir}/bwc/checkout-${bwcBranch}")
+ final String remote = System.getProperty("tests.bwc.remote", "elastic")
+
task createClone(type: LoggedExec) {
onlyIf { checkoutDir.exists() == false }
commandLine = ['git', 'clone', rootDir, checkoutDir]
}
- task findUpstream(type: LoggedExec) {
+ task findRemote(type: LoggedExec) {
dependsOn createClone
workingDir = checkoutDir
commandLine = ['git', 'remote', '-v']
doLast {
- project.ext.upstreamExists = false
+ project.ext.remoteExists = false
output.toString('UTF-8').eachLine {
- if (it.contains("upstream")) {
- project.ext.upstreamExists = true
+ if (it.contains("${remote}\thttps://github.com/${remote}/elasticsearch.git")) {
+ project.ext.remoteExists = true
}
}
}
}
- task addUpstream(type: LoggedExec) {
- dependsOn findUpstream
- onlyIf { project.ext.upstreamExists == false }
+ task addRemote(type: LoggedExec) {
+ dependsOn findRemote
+ onlyIf { project.ext.remoteExists == false }
workingDir = checkoutDir
- commandLine = ['git', 'remote', 'add', 'upstream', 'https://github.com/elastic/elasticsearch.git']
+ commandLine = ['git', 'remote', 'add', "${remote}", "https://github.com/${remote}/elasticsearch.git"]
}
task fetchLatest(type: LoggedExec) {
onlyIf { project.gradle.startParameter.isOffline() == false }
- dependsOn addUpstream
+ dependsOn addRemote
workingDir = checkoutDir
commandLine = ['git', 'fetch', '--all']
}
String buildMetadataKey = "bwc_refspec_${project.path.substring(1)}"
task checkoutBwcBranch(type: LoggedExec) {
- String refspec = System.getProperty("tests.bwc.refspec", buildMetadata.get(buildMetadataKey, "upstream/${bwcBranch}"))
+ String refspec = System.getProperty("tests.bwc.refspec", buildMetadata.get(buildMetadataKey, "${remote}/${bwcBranch}"))
dependsOn fetchLatest
workingDir = checkoutDir
commandLine = ['git', 'checkout', refspec]
diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
index 11953cce537a2..ea72e07e337b8 100644
--- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
@@ -402,7 +402,7 @@ deprecated[6.0.0, Use `_key` instead of `_time` to order buckets by their dates/
There are some cases where date histogram can't help us, like for example, when we need
to aggregate the results by day of the week.
-In this case to overcame the problem, we can use a script that returns the day of the week:
+In this case to overcome the problem, we can use a script that returns the day of the week:
[source,js]
diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc
index 2a252595dd59a..11b2347e7f31b 100644
--- a/docs/reference/docs/get.asciidoc
+++ b/docs/reference/docs/get.asciidoc
@@ -275,10 +275,6 @@ replicas.
The `preference` can be set to:
-`_primary`::
- The operation will go and be executed only on the primary
- shards.
-
`_local`::
The operation will prefer to be executed on a local
allocated shard if possible.
diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc
index 8e18f3034e82b..7875f011abee1 100644
--- a/docs/reference/docs/index_.asciidoc
+++ b/docs/reference/docs/index_.asciidoc
@@ -91,8 +91,7 @@ will control the version of the document the operation is intended to be
executed against. A good example of a use case for versioning is
performing a transactional read-then-update. Specifying a `version` from
the document initially read ensures no changes have happened in the
-meantime (when reading in order to update, it is recommended to set
-`preference` to `_primary`). For example:
+meantime. For example:
[source,js]
--------------------------------------------------
@@ -242,7 +241,7 @@ The result of the above index operation is:
[[index-routing]]
=== Routing
-By default, shard placement — or `routing` — is controlled by using a
+By default, shard placement — or `routing` — is controlled by using a
hash of the document's id value. For more explicit control, the value
fed into the hash function used by the router can be directly specified
on a per-operation basis using the `routing` parameter. For example:
diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index bb7908a80c7b8..00876365a27de 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -148,6 +148,16 @@ And now we are ready to start our node and single cluster:
./elasticsearch
--------------------------------------------------
+[float]
+=== Installation with Homebrew
+
+On macOS, Elasticsearch can also be installed via https://brew.sh[Homebrew]:
+
+["source","sh"]
+--------------------------------------------------
+brew install elasticsearch
+--------------------------------------------------
+
[float]
=== Installation example with MSI Windows Installer
diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc
index 668857ed41e4d..db7479f9f7d38 100644
--- a/docs/reference/how-to/indexing-speed.asciidoc
+++ b/docs/reference/how-to/indexing-speed.asciidoc
@@ -114,6 +114,13 @@ The default is `10%` which is often plenty: for example, if you give the JVM
10GB of memory, it will give 1GB to the index buffer, which is enough to host
two shards that are heavily indexing.
+[float]
+=== Disable `_field_names`
+
+The <<mapping-field-names-field,`_field_names` field>> introduces some
+index-time overhead, so you might want to disable it if you never need to
+run `exists` queries.
+
[float]
=== Additional optimizations
diff --git a/docs/reference/mapping/fields/field-names-field.asciidoc b/docs/reference/mapping/fields/field-names-field.asciidoc
index 45839ac55d950..9dd1f17cbb3a9 100644
--- a/docs/reference/mapping/fields/field-names-field.asciidoc
+++ b/docs/reference/mapping/fields/field-names-field.asciidoc
@@ -35,3 +35,25 @@ GET my_index/_search
// CONSOLE
<1> Querying on the `_field_names` field (also see the <> query)
+
+
+==== Disabling `_field_names`
+
+Because `_field_names` introduces some index-time overhead, you might want to
+disable this field if you want to optimize for indexing speed and do not need
+`exists` queries.
+
+[source,js]
+--------------------------------------------------
+PUT tweets
+{
+ "mappings": {
+ "tweet": {
+ "_field_names": {
+ "enabled": false
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
diff --git a/docs/reference/migration/migrate_7_0/cluster.asciidoc b/docs/reference/migration/migrate_7_0/cluster.asciidoc
index 12e6916e00145..e9584074d73d2 100644
--- a/docs/reference/migration/migrate_7_0/cluster.asciidoc
+++ b/docs/reference/migration/migrate_7_0/cluster.asciidoc
@@ -6,8 +6,11 @@
Due to cross-cluster search using `:` to separate a cluster and index name,
cluster names may no longer contain `:`.
-==== new default for `wait_for_active_shards` parameter of the open index command
+==== New default for `wait_for_active_shards` parameter of the open index command
The default value for the `wait_for_active_shards` parameter of the open index API
is changed from 0 to 1, which means that the command will now by default wait for all
primary shards of the opened index to be allocated.
+
+==== Shard preferences `_primary`, `_primary_first`, `_replica`, and `_replica_first` are removed
+These shard preferences are removed in favour of the `_prefer_nodes` and `_only_nodes` preferences.
diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc
index 64e9e2e1663aa..d269ce0456763 100644
--- a/docs/reference/modules/snapshots.asciidoc
+++ b/docs/reference/modules/snapshots.asciidoc
@@ -1,39 +1,55 @@
[[modules-snapshots]]
== Snapshot And Restore
-The snapshot and restore module allows to create snapshots of individual
-indices or an entire cluster into a remote repository like shared file system,
-S3, or HDFS. These snapshots are great for backups because they can be restored
-relatively quickly but they are not archival because they can only be restored
-to versions of Elasticsearch that can read the index. That means that:
+You can store snapshots of individual indices or an entire cluster in
+a remote repository like a shared file system, S3, or HDFS. These snapshots
+are great for backups because they can be restored relatively quickly. However,
+snapshots can only be restored to versions of Elasticsearch that can read the
+indices:
+* A snapshot of an index created in 5.x can be restored to 6.x.
* A snapshot of an index created in 2.x can be restored to 5.x.
* A snapshot of an index created in 1.x can be restored to 2.x.
-* A snapshot of an index created in 1.x can **not** be restored to 5.x.
-
-To restore a snapshot of an index created in 1.x to 5.x you can restore it to
-a 2.x cluster and use <> to rebuild
-the index in a 5.x cluster. This is as time consuming as restoring from
-archival copies of the original data.
-
-Note: If a repository is connected to a 2.x cluster, and you want to connect
-a 5.x cluster to the same repository, you will have to either first set the 2.x
-repository to `readonly` mode (see below for details on `readonly` mode) or create
-the 5.x repository in `readonly` mode. A 5.x cluster will update the repository
-to conform to 5.x specific formats, which will mean that any new snapshots written
-via the 2.x cluster will not be visible to the 5.x cluster, and vice versa.
-In fact, as a general rule, only one cluster should connect to the same repository
-location with write access; all other clusters connected to the same repository
-should be set to `readonly` mode. While setting all but one repositories to
-`readonly` should work with multiple clusters differing by one major version,
-it is not a supported configuration.
+Conversely, snapshots of indices created in 1.x **cannot** be restored to
+5.x or 6.x, and snapshots of indices created in 2.x **cannot** be restored
+to 6.x.
+
+Snapshots are incremental and can contain indices created in various
+versions of Elasticsearch. If any indices in a snapshot were created in an
+incompatible version, you will not be able to restore the snapshot.
+
+IMPORTANT: When backing up your data prior to an upgrade, keep in mind that you
+won't be able to restore snapshots after you upgrade if they contain indices
+created in a version that's incompatible with the upgrade version.
+
+If you end up in a situation where you need to restore a snapshot of an index
+that is incompatible with the version of the cluster you are currently running,
+you can restore it on the latest compatible version and use
+<<reindex-from-remote,reindex from remote>> to rebuild the index on the current
+version. Reindexing from remote is only possible if the original index has
+source enabled. Retrieving and reindexing the data can take significantly longer
+than simply restoring a snapshot. If you have a large amount of data, we
+recommend testing the reindex from remote process with a subset of your data to
+understand the time requirements before proceeding.
[float]
=== Repositories
-Before any snapshot or restore operation can be performed, a snapshot repository should be registered in
-Elasticsearch. The repository settings are repository-type specific. See below for details.
+You must register a snapshot repository before you can perform snapshot and
+restore operations. We recommend creating a new snapshot repository for each
+major version. The valid repository settings depend on the repository type.
+
+If you register the same snapshot repository with multiple clusters, only
+one cluster should have write access to the repository. All other clusters
+connected to that repository should set the repository to `readonly` mode.
+
+NOTE: The snapshot format can change across major versions, so if you have
+clusters on different major versions trying to write the same repository,
+new snapshots written by one version will not be visible to the other. While
+setting the repository to `readonly` on all but one of the clusters should work
+with multiple clusters differing by one major version, it is not a supported
+configuration.
[source,js]
-----------------------------------
@@ -48,7 +64,7 @@ PUT /_snapshot/my_backup
// CONSOLE
// TESTSETUP
-Once a repository is registered, its information can be obtained using the following command:
+To retrieve information about a registered repository, use a GET request:
[source,js]
-----------------------------------
@@ -71,9 +87,11 @@ which returns:
-----------------------------------
// TESTRESPONSE
-Information about multiple repositories can be fetched in one go by using a comma-delimited list of repository names.
-Star wildcards are supported as well. For example, information about repositories that start with `repo` or that contain `backup`
-can be obtained using the following command:
+To retrieve information about multiple repositories, specify a
+comma-delimited list of repositories. You can also use the * wildcard when
+specifying repository names. For example, the following request retrieves
+information about all of the snapshot repositories that start with `repo` or
+contain `backup`:
[source,js]
-----------------------------------
@@ -81,8 +99,8 @@ GET /_snapshot/repo*,*backup*
-----------------------------------
// CONSOLE
-If a repository name is not specified, or `_all` is used as repository name Elasticsearch will return information about
-all repositories currently registered in the cluster:
+To retrieve information about all registered snapshot repositories, omit the
+repository name or specify `_all`:
[source,js]
-----------------------------------
diff --git a/docs/reference/search/request/preference.asciidoc b/docs/reference/search/request/preference.asciidoc
index d0f60d700a82c..dbd9055ff8c86 100644
--- a/docs/reference/search/request/preference.asciidoc
+++ b/docs/reference/search/request/preference.asciidoc
@@ -7,21 +7,6 @@ search. By default, the operation is randomized among the available shard copies
The `preference` is a query string parameter which can be set to:
[horizontal]
-`_primary`::
- The operation will go and be executed only on the primary
- shards.
-
-`_primary_first`::
- The operation will go and be executed on the primary
- shard, and if not available (failover), will execute on other shards.
-
-`_replica`::
- The operation will go and be executed only on a replica shard.
-
-`_replica_first`::
- The operation will go and be executed only on a replica shard, and if
- not available (failover), will execute on other shards.
-
`_local`::
The operation will prefer to be executed on a local
allocated shard if possible.
@@ -33,7 +18,7 @@ The `preference` is a query string parameter which can be set to:
`_shards:2,3`::
Restricts the operation to the specified shards. (`2`
and `3` in this case). This preference can be combined with other
- preferences but it has to appear first: `_shards:2,3|_primary`
+ preferences but it has to appear first: `_shards:2,3|_local`
`_only_nodes`::
Restricts the operation to nodes specified in <>
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java
index ba815a8796bc2..be1f2495f0b23 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java
@@ -29,6 +29,7 @@
import java.util.Arrays;
import java.util.HashSet;
+import java.util.List;
import java.util.Set;
/**
@@ -53,10 +54,10 @@ public final class CJKBigramFilterFactory extends AbstractTokenFilterFactory {
CJKBigramFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings);
outputUnigrams = settings.getAsBoolean("output_unigrams", false);
- final String[] asArray = settings.getAsArray("ignored_scripts");
+ final List<String> asArray = settings.getAsList("ignored_scripts");
 Set<String> scripts = new HashSet<>(Arrays.asList("han", "hiragana", "katakana", "hangul"));
if (asArray != null) {
- scripts.removeAll(Arrays.asList(asArray));
+ scripts.removeAll(asArray);
}
int flags = 0;
for (String script : scripts) {
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java
index 9ee889e3af610..760c1c79ba4cd 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java
@@ -26,6 +26,7 @@
import org.elasticsearch.index.analysis.AbstractCharFilterFactory;
import java.io.Reader;
+import java.util.List;
import java.util.Set;
import static java.util.Collections.unmodifiableSet;
@@ -36,8 +37,8 @@ public class HtmlStripCharFilterFactory extends AbstractCharFilterFactory {
HtmlStripCharFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name);
- String[] escapedTags = settings.getAsArray("escaped_tags");
- if (escapedTags.length > 0) {
+ List<String> escapedTags = settings.getAsList("escaped_tags");
+ if (escapedTags.size() > 0) {
this.escapedTags = unmodifiableSet(newHashSet(escapedTags));
} else {
this.escapedTags = null;
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java
index 4da560836eb13..0f94b521e4b7d 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java
@@ -27,8 +27,8 @@
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
-import java.util.Arrays;
import java.util.HashSet;
+import java.util.List;
import java.util.Set;
/**
@@ -48,12 +48,12 @@ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory {
KeepTypesFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
- final String[] arrayKeepTypes = settings.getAsArray(KEEP_TYPES_KEY, null);
+ final List<String> arrayKeepTypes = settings.getAsList(KEEP_TYPES_KEY, null);
if ((arrayKeepTypes == null)) {
throw new IllegalArgumentException("keep_types requires `" + KEEP_TYPES_KEY + "` to be configured");
}
- this.keepTypes = new HashSet<>(Arrays.asList(arrayKeepTypes));
+ this.keepTypes = new HashSet<>(arrayKeepTypes);
}
@Override
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java
index 521e89b35e235..df67f24cc7f5f 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java
@@ -22,7 +22,6 @@
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.KeepWordFilter;
-import org.apache.lucene.util.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -31,6 +30,8 @@
import org.elasticsearch.index.analysis.StopTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
+import java.util.List;
+
/**
* A {@link TokenFilterFactory} for {@link KeepWordFilter}. This filter only
* keep tokens that are contained in the term set configured via
@@ -61,7 +62,7 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory {
KeepWordFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
- final String[] arrayKeepWords = settings.getAsArray(KEEP_WORDS_KEY, null);
+ final List<String> arrayKeepWords = settings.getAsList(KEEP_WORDS_KEY, null);
final String keepWordsPath = settings.get(KEEP_WORDS_PATH_KEY, null);
if ((arrayKeepWords == null && keepWordsPath == null) || (arrayKeepWords != null && keepWordsPath != null)) {
// we don't allow both or none
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java
index bf6315dd12193..7e69e44ffff24 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java
@@ -27,6 +27,7 @@
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
+import java.util.List;
import java.util.regex.Pattern;
public class PatternCaptureGroupTokenFilterFactory extends AbstractTokenFilterFactory {
@@ -37,13 +38,13 @@ public class PatternCaptureGroupTokenFilterFactory extends AbstractTokenFilterFa
PatternCaptureGroupTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings);
- String[] regexes = settings.getAsArray(PATTERNS_KEY, null, false);
+ List<String> regexes = settings.getAsList(PATTERNS_KEY, null, false);
if (regexes == null) {
throw new IllegalArgumentException("required setting '" + PATTERNS_KEY + "' is missing for token filter [" + name + "]");
}
- patterns = new Pattern[regexes.length];
- for (int i = 0; i < regexes.length; i++) {
- patterns[i] = Pattern.compile(regexes[i]);
+ patterns = new Pattern[regexes.size()];
+ for (int i = 0; i < regexes.size(); i++) {
+ patterns[i] = Pattern.compile(regexes.get(i));
}
preserveOriginal = settings.getAsBoolean(PRESERVE_ORIG_KEY, true);
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java
index da9ab1090c174..8efc0d5941f9e 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java
@@ -56,7 +56,7 @@ public void testDefault() throws IOException {
public void testWithoutCommonWordsMatch() throws IOException {
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_default.type", "common_grams")
- .putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
+ .putList("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
@@ -75,7 +75,7 @@ public void testWithoutCommonWordsMatch() throws IOException {
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_default.type", "common_grams")
.put("index.analysis.filter.common_grams_default.query_mode", false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
+ .putList("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
{
@@ -94,7 +94,7 @@ public void testSettings() throws IOException {
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_1.type", "common_grams")
.put("index.analysis.filter.common_grams_1.ignore_case", true)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_1");
@@ -109,7 +109,7 @@ public void testSettings() throws IOException {
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_2.type", "common_grams")
.put("index.analysis.filter.common_grams_2.ignore_case", false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_2");
@@ -122,7 +122,7 @@ public void testSettings() throws IOException {
}
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_3.type", "common_grams")
- .putArray("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
@@ -166,7 +166,7 @@ public void testQueryModeSettings() throws IOException {
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_1.type", "common_grams")
.put("index.analysis.filter.common_grams_1.query_mode", true)
- .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
.put("index.analysis.filter.common_grams_1.ignore_case", true)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
@@ -181,7 +181,7 @@ public void testQueryModeSettings() throws IOException {
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_2.type", "common_grams")
.put("index.analysis.filter.common_grams_2.query_mode", true)
- .putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.put("index.analysis.filter.common_grams_2.ignore_case", false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
@@ -196,7 +196,7 @@ public void testQueryModeSettings() throws IOException {
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_3.type", "common_grams")
.put("index.analysis.filter.common_grams_3.query_mode", true)
- .putArray("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
@@ -210,7 +210,7 @@ public void testQueryModeSettings() throws IOException {
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_4.type", "common_grams")
.put("index.analysis.filter.common_grams_4.query_mode", true)
- .putArray("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java
index bb1f2a55f7cb4..6b4682d04a128 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java
@@ -71,7 +71,7 @@ public void testNgramHighlightingWithBrokenPositions() throws IOException {
.put("analysis.tokenizer.autocomplete.token_chars", "letter,digit")
.put("analysis.tokenizer.autocomplete.type", "nGram")
.put("analysis.filter.wordDelimiter.type", "word_delimiter")
- .putArray("analysis.filter.wordDelimiter.type_table",
+ .putList("analysis.filter.wordDelimiter.type_table",
"& => ALPHANUM", "| => ALPHANUM", "! => ALPHANUM",
"? => ALPHANUM", ". => ALPHANUM", "- => ALPHANUM",
"# => ALPHANUM", "% => ALPHANUM", "+ => ALPHANUM",
@@ -88,10 +88,10 @@ public void testNgramHighlightingWithBrokenPositions() throws IOException {
.put("analysis.filter.wordDelimiter.catenate_all", false)
.put("analysis.analyzer.autocomplete.tokenizer", "autocomplete")
- .putArray("analysis.analyzer.autocomplete.filter",
+ .putList("analysis.analyzer.autocomplete.filter",
"lowercase", "wordDelimiter")
.put("analysis.analyzer.search_autocomplete.tokenizer", "whitespace")
- .putArray("analysis.analyzer.search_autocomplete.filter",
+ .putList("analysis.analyzer.search_autocomplete.filter",
"lowercase", "wordDelimiter")));
client().prepareIndex("test", "test", "1")
.setSource("name", "ARCOTEL Hotels Deutschland").get();
@@ -121,7 +121,7 @@ public void testMultiPhraseCutoff() throws IOException {
.put("analysis.filter.wordDelimiter.catenate_numbers", true)
.put("analysis.filter.wordDelimiter.catenate_all", false)
.put("analysis.analyzer.custom_analyzer.tokenizer", "whitespace")
- .putArray("analysis.analyzer.custom_analyzer.filter",
+ .putList("analysis.analyzer.custom_analyzer.filter",
"lowercase", "wordDelimiter"))
);
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java
index a7b3c6e61f006..e9248c3d21289 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java
@@ -76,7 +76,7 @@ public void testKeepWordsPathSettings() {
}
settings = Settings.builder().put(settings)
- .putArray("index.analysis.filter.non_broken_keep_filter.keep_words", "test")
+ .putList("index.analysis.filter.non_broken_keep_filter.keep_words", "test")
.build();
try {
// test our none existing setup is picked up
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java
index 4df1fb780e932..a19882d6faa00 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java
@@ -38,7 +38,7 @@ public void testKeepTypes() throws IOException {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.keep_numbers.type", "keep_types")
- .putArray("index.analysis.filter.keep_numbers.types", new String[] {"", ""})
+ .putList("index.analysis.filter.keep_numbers.types", new String[] {"", ""})
.build();
ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers");
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java
index 081580a6ae93a..f454e8c776c12 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java
@@ -42,9 +42,9 @@ public void testCreateIndexWithMassiveWordList() {
.put("index.number_of_shards", 1)
.put("analysis.analyzer.test_analyzer.type", "custom")
.put("analysis.analyzer.test_analyzer.tokenizer", "standard")
- .putArray("analysis.analyzer.test_analyzer.filter", "dictionary_decompounder", "lowercase")
+ .putList("analysis.analyzer.test_analyzer.filter", "dictionary_decompounder", "lowercase")
.put("analysis.filter.dictionary_decompounder.type", "dictionary_decompounder")
- .putArray("analysis.filter.dictionary_decompounder.word_list", wordList)
+ .putList("analysis.filter.dictionary_decompounder.word_list", wordList)
).get();
}
}
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java
index 24efd89b7e0c8..3f4641c7c189b 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java
@@ -78,7 +78,7 @@ public void testNoTokenChars() throws IOException {
final String name = "ngr";
final Settings indexSettings = newAnalysisSettingsBuilder().build();
final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 4)
- .putArray("token_chars", new String[0]).build();
+ .putList("token_chars", new String[0]).build();
Tokenizer tokenizer = new NGramTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings)
.create();
tokenizer.setReader(new StringReader("1.34"));
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
index 6ed5f0b66cf9b..e25891aca4e35 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
@@ -25,6 +25,7 @@
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.Objects;
import java.util.function.Function;
import org.elasticsearch.ExceptionsHelper;
@@ -61,7 +62,8 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
@Override
public void execute(IngestDocument ingestDocument) throws Exception {
- String date = ingestDocument.getFieldValue(field, String.class);
+ // Date can be specified as a string or long:
+ String date = Objects.toString(ingestDocument.getFieldValue(field, Object.class));
DateTime dateTime = null;
Exception lastException = null;
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
index 19d791dd8648c..6736594613954 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
@@ -62,6 +62,11 @@ public void testUnixMs()throws Exception {
Collections.singletonMap("_field", "1000500"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo(""));
+
+ document = new IngestDocument("_index", "_type", "_id", null, null,
+ Collections.singletonMap("_field", 1000500L));
+ dateProcessor.execute(document);
+ assertThat(document.getSourceAndMetadata().get("_index"), equalTo(""));
}
public void testUnix()throws Exception {
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java
index 17dc2740ee426..b50eb788c6f57 100644
--- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java
@@ -38,6 +38,7 @@
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ClassPermission;
import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.FilterScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptException;
@@ -107,6 +108,9 @@ protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundE
} else if (context.instanceClazz.equals(ExecutableScript.class)) {
ExecutableScript.Factory factory = (p) -> new ExpressionExecutableScript(expr, p);
return context.factoryClazz.cast(factory);
+ } else if (context.instanceClazz.equals(FilterScript.class)) {
+ FilterScript.Factory factory = (p, lookup) -> newFilterScript(expr, lookup, p);
+ return context.factoryClazz.cast(factory);
}
throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]");
}
@@ -236,6 +240,27 @@ private SearchScript.LeafFactory newSearchScript(Expression expr, SearchLookup l
return new ExpressionSearchScript(expr, bindings, specialValue, needsScores);
}
+ /**
+     * This is a hack for filter scripts, which must return booleans instead of doubles as expressions do.
+     * See https://github.com/elastic/elasticsearch/issues/26429.
+ */
+ private FilterScript.LeafFactory newFilterScript(Expression expr, SearchLookup lookup, @Nullable Map vars) {
+ SearchScript.LeafFactory searchLeafFactory = newSearchScript(expr, lookup, vars);
+ return ctx -> {
+ SearchScript script = searchLeafFactory.newInstance(ctx);
+ return new FilterScript(vars, lookup, ctx) {
+ @Override
+ public boolean execute() {
+ return script.runAsDouble() != 0.0;
+ }
+ @Override
+ public void setDocument(int docid) {
+ script.setDocument(docid);
+ }
+ };
+ };
+ }
+
/**
* converts a ParseException at compile-time or link-time to a ScriptException
*/
diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java
index d8d09ffba790a..9a91fccf4ad30 100644
--- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java
+++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java
@@ -700,4 +700,19 @@ public void testBoolean() throws Exception {
assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
}
+
+ public void testFilterScript() throws Exception {
+ createIndex("test");
+ ensureGreen("test");
+ indexRandom(true,
+ client().prepareIndex("test", "doc", "1").setSource("foo", 1.0),
+ client().prepareIndex("test", "doc", "2").setSource("foo", 0.0));
+ SearchRequestBuilder builder = buildRequest("doc['foo'].value");
+ Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap());
+ builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script)));
+ SearchResponse rsp = builder.get();
+ assertSearchResponse(rsp);
+ assertEquals(1, rsp.getHits().getTotalHits());
+ assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
+ }
}
diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle
index dddc98ae138ad..bc8a118ba1d64 100644
--- a/modules/lang-painless/build.gradle
+++ b/modules/lang-painless/build.gradle
@@ -146,7 +146,7 @@ task regen {
fileset(dir: outputPath, includes: 'Painless*.java')
}
// fix line endings
- ant.fixcrlf(srcdir: outputPath) {
+ ant.fixcrlf(srcdir: outputPath, eol: 'lf') {
patternset(includes: 'Painless*.java')
}
}
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
index 5636833442d9f..893efeb6957ae 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
@@ -22,7 +22,6 @@
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.IntSet;
import io.netty.bootstrap.ServerBootstrap;
-import io.netty.channel.AdaptiveRecvByteBufAllocator;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandler;
@@ -88,7 +87,6 @@
import java.util.regex.Pattern;
import static org.elasticsearch.common.settings.Setting.boolSetting;
-import static org.elasticsearch.common.settings.Setting.byteSizeSetting;
import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_HEADERS;
@@ -132,10 +130,6 @@ public class Netty4HttpServerTransport extends AbstractLifecycleComponent implem
public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE =
Setting.byteSizeSetting("http.netty.receive_predictor_size", new ByteSizeValue(64, ByteSizeUnit.KB), Property.NodeScope);
- public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MIN =
- byteSizeSetting("http.netty.receive_predictor_min", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope);
- public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MAX =
- byteSizeSetting("http.netty.receive_predictor_max", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope);
protected final NetworkService networkService;
@@ -227,17 +221,8 @@ public Netty4HttpServerTransport(Settings settings, NetworkService networkServic
this.tcpReceiveBufferSize = SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE.get(settings);
this.detailedErrorsEnabled = SETTING_HTTP_DETAILED_ERRORS_ENABLED.get(settings);
- // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher ones for us, even fixed one
- ByteSizeValue receivePredictorMin = SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MIN.get(settings);
- ByteSizeValue receivePredictorMax = SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MAX.get(settings);
- if (receivePredictorMax.getBytes() == receivePredictorMin.getBytes()) {
- recvByteBufAllocator = new FixedRecvByteBufAllocator(Math.toIntExact(receivePredictorMax.getBytes()));
- } else {
- recvByteBufAllocator = new AdaptiveRecvByteBufAllocator(
- Math.toIntExact(receivePredictorMin.getBytes()),
- Math.toIntExact(receivePredictorMin.getBytes()),
- Math.toIntExact(receivePredictorMax.getBytes()));
- }
+ ByteSizeValue receivePredictor = SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE.get(settings);
+ recvByteBufAllocator = new FixedRecvByteBufAllocator(receivePredictor.bytesAsInt());
this.compression = SETTING_HTTP_COMPRESSION.get(settings);
this.compressionLevel = SETTING_HTTP_COMPRESSION_LEVEL.get(settings);
@@ -253,9 +238,8 @@ public Netty4HttpServerTransport(Settings settings, NetworkService networkServic
this.maxContentLength = maxContentLength;
logger.debug("using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}], " +
- "receive_predictor[{}->{}], pipelining[{}], pipelining_max_events[{}]",
- maxChunkSize, maxHeaderSize, maxInitialLineLength, this.maxContentLength,
- receivePredictorMin, receivePredictorMax, pipelining, pipeliningMaxEvents);
+ "receive_predictor[{}], pipelining[{}], pipelining_max_events[{}]",
+ maxChunkSize, maxHeaderSize, maxInitialLineLength, this.maxContentLength, receivePredictor, pipelining, pipeliningMaxEvents);
}
public Settings settings() {
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java
index 49fc35cb7cfd6..4c842d5a4dca7 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java
@@ -56,8 +56,6 @@ public List> getSettings() {
Netty4HttpServerTransport.SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS,
Netty4HttpServerTransport.SETTING_HTTP_WORKER_COUNT,
Netty4HttpServerTransport.SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE,
- Netty4HttpServerTransport.SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MIN,
- Netty4HttpServerTransport.SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MAX,
Netty4Transport.WORKER_COUNT,
Netty4Transport.NETTY_RECEIVE_PREDICTOR_SIZE,
Netty4Transport.NETTY_RECEIVE_PREDICTOR_MIN,
diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java
index 14fa5922c1d90..fa1999cf17e39 100644
--- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java
+++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java
@@ -37,6 +37,7 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@@ -63,7 +64,7 @@ private ICUTokenizerConfig getIcuConfig(Environment env, Settings settings) {
Map tailored = new HashMap<>();
try {
- String[] ruleFiles = settings.getAsArray(RULE_FILES);
+ List ruleFiles = settings.getAsList(RULE_FILES);
for (String scriptAndResourcePath : ruleFiles) {
int colonPos = scriptAndResourcePath.indexOf(":");
diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java
index ff4ab4943e30f..52dabef7c5dff 100644
--- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java
+++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java
@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
-import java.util.Arrays;
import java.util.HashSet;
+import java.util.List;
import org.apache.commons.codec.Encoder;
import org.apache.commons.codec.language.Caverphone1;
@@ -50,7 +50,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory {
private final Encoder encoder;
private final boolean replace;
private int maxcodelength;
- private String[] languageset;
+ private List languageset;
private NameType nametype;
private RuleType ruletype;
@@ -82,7 +82,7 @@ public PhoneticTokenFilterFactory(IndexSettings indexSettings, Environment envir
this.maxcodelength = settings.getAsInt("max_code_len", 4);
} else if ("bm".equalsIgnoreCase(encodername) || "beider_morse".equalsIgnoreCase(encodername) || "beidermorse".equalsIgnoreCase(encodername)) {
this.encoder = null;
- this.languageset = settings.getAsArray("languageset");
+ this.languageset = settings.getAsList("languageset");
String ruleType = settings.get("rule_type", "approx");
if ("approx".equalsIgnoreCase(ruleType)) {
ruletype = RuleType.APPROX;
@@ -117,7 +117,7 @@ public TokenStream create(TokenStream tokenStream) {
if (encoder == null) {
if (ruletype != null && nametype != null) {
if (languageset != null) {
- final LanguageSet languages = LanguageSet.from(new HashSet<>(Arrays.asList(languageset)));
+ final LanguageSet languages = LanguageSet.from(new HashSet<>(languageset));
return new BeiderMorseFilter(tokenStream, new PhoneticEngine(nametype, ruletype, true), languages);
}
return new BeiderMorseFilter(tokenStream, new PhoneticEngine(nametype, ruletype, true));
diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
index f3685278dc6b9..e7986cb878e41 100644
--- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
@@ -229,7 +229,7 @@ public void testFilterByTags() throws InterruptedException {
public void testFilterByMultipleTags() throws InterruptedException {
int nodes = randomIntBetween(5, 10);
Settings nodeSettings = Settings.builder()
- .putArray(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod", "preprod")
+ .putList(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod", "preprod")
.build();
int prodInstances = 0;
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
index 5ae30c74a3226..31ea9bdb1c21e 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
@@ -128,7 +128,7 @@ public void testNodesWithDifferentTagsAndOneTagSet() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
- .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
+ .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -140,7 +140,7 @@ public void testNodesWithDifferentTagsAndTwoTagSet() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
- .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
+ .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -162,7 +162,7 @@ public void testNodesWithSameTagsAndOneTagSet() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
- .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
+ .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -173,7 +173,7 @@ public void testNodesWithSameTagsAndTwoTagsSet() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
- .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
+ .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -183,7 +183,7 @@ public void testNodesWithSameTagsAndTwoTagsSet() {
public void testMultipleZonesAndTwoNodesInSameZone() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
- .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
+ .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -193,7 +193,7 @@ public void testMultipleZonesAndTwoNodesInSameZone() {
public void testMultipleZonesAndTwoNodesInDifferentZones() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
- .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
+ .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -206,7 +206,7 @@ public void testMultipleZonesAndTwoNodesInDifferentZones() {
public void testZeroNode43() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
- .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
+ .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -226,7 +226,7 @@ public void testIllegalSettingsMissingAllRequired() {
public void testIllegalSettingsMissingProject() {
Settings nodeSettings = Settings.builder()
- .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
+ .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
try {
@@ -258,7 +258,7 @@ public void testIllegalSettingsMissingZone() {
public void testNoRegionReturnsEmptyList() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
- .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a")
+ .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List discoveryNodes = buildDynamicNodes(mock, nodeSettings);
diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java
index a2afbccf27a53..6d609bd08d2c6 100644
--- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java
+++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java
@@ -39,7 +39,7 @@ public class AzureRepositorySettingsTests extends ESTestCase {
private AzureRepository azureRepository(Settings settings) throws StorageException, IOException, URISyntaxException {
Settings internalSettings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
.put(settings)
.build();
return new AzureRepository(new RepositoryMetaData("foo", "azure", internalSettings), new Environment(internalSettings),
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java
index 50a9f3426acbd..9ba59f8d49727 100644
--- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java
@@ -80,7 +80,7 @@ public void testEnvironmentPaths() throws Exception {
Settings.Builder settingsBuilder = Settings.builder();
settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.resolve("home").toString());
- settingsBuilder.putArray(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(),
+ settingsBuilder.putList(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(),
esHome.resolve("data2").toString());
settingsBuilder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), esHome.resolve("custom").toString());
settingsBuilder.put(Environment.PATH_LOGS_SETTING.getKey(), esHome.resolve("logs").toString());
@@ -153,7 +153,7 @@ public void testDuplicateDataPaths() throws IOException {
Settings
.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), home.toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), data.toString(), duplicate.toString())
+ .putList(Environment.PATH_DATA_SETTING.getKey(), data.toString(), duplicate.toString())
.build();
final Environment environment = new Environment(settings);
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java
index 3eebf4a2f6481..8192a8c8a29c5 100644
--- a/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java
@@ -50,7 +50,7 @@ public void testMissingWritePermission() throws IOException {
PosixFilePermission.OWNER_READ)));
Settings build = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
IOException ioException = expectThrows(IOException.class, () -> {
new NodeEnvironment(build, new Environment(build));
});
@@ -70,7 +70,7 @@ public void testMissingWritePermissionOnIndex() throws IOException {
PosixFilePermission.OWNER_READ)));
Settings build = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
IOException ioException = expectThrows(IOException.class, () -> {
new NodeEnvironment(build, new Environment(build));
});
@@ -95,7 +95,7 @@ public void testMissingWritePermissionOnShard() throws IOException {
PosixFilePermission.OWNER_READ)));
Settings build = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
IOException ioException = expectThrows(IOException.class, () -> {
new NodeEnvironment(build, new Environment(build));
});
diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle
index 95e62416cade3..f271dae5cfda1 100644
--- a/qa/full-cluster-restart/build.gradle
+++ b/qa/full-cluster-restart/build.gradle
@@ -52,6 +52,9 @@ for (Version version : indexCompatVersions) {
// some tests rely on the translog not being flushed
setting 'indices.memory.shard_inactive_time', '20m'
+ // debug logging for testRecovery
+ setting 'logger.level', 'DEBUG'
+
if (version.onOrAfter('5.3.0')) {
setting 'http.content_type.required', 'true'
}
@@ -72,6 +75,9 @@ for (Version version : indexCompatVersions) {
// some tests rely on the translog not being flushed
setting 'indices.memory.shard_inactive_time', '20m'
+ // debug logging for testRecovery
+ setting 'logger.level', 'DEBUG'
+
numNodes = 2
dataDir = { nodeNum -> oldClusterTest.nodes[nodeNum].dataDir }
cleanShared = false // We want to keep snapshots made by the old cluster!
@@ -81,6 +87,7 @@ for (Version version : indexCompatVersions) {
systemProperty 'tests.is_old_cluster', 'false'
systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo")
+
}
Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") {
diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
index c7e708418c92c..22859859f2521 100644
--- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
+++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
@@ -25,6 +25,7 @@
import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;
import org.elasticsearch.client.Response;
+import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -37,12 +38,15 @@
import org.junit.Before;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.Base64;
import java.util.Collections;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -227,17 +231,15 @@ public void testNewReplicasWork() throws Exception {
Map recoverRsp = toMap(client().performRequest("GET", "/" + index + "/_recovery"));
logger.debug("--> recovery status:\n{}", recoverRsp);
- Map responseBody = toMap(client().performRequest("GET", "/" + index + "/_search",
- Collections.singletonMap("preference", "_primary")));
- assertNoFailures(responseBody);
- int foundHits1 = (int) XContentMapValues.extractValue("hits.total", responseBody);
-
- responseBody = toMap(client().performRequest("GET", "/" + index + "/_search",
- Collections.singletonMap("preference", "_replica")));
- assertNoFailures(responseBody);
- int foundHits2 = (int) XContentMapValues.extractValue("hits.total", responseBody);
- assertEquals(foundHits1, foundHits2);
- // TODO: do something more with the replicas! index?
+ Set counts = new HashSet<>();
+ for (String node : dataNodes(index, client())) {
+ Map responseBody = toMap(client().performRequest("GET", "/" + index + "/_search",
+ Collections.singletonMap("preference", "_only_nodes:" + node)));
+ assertNoFailures(responseBody);
+ int hits = (int) XContentMapValues.extractValue("hits.total", responseBody);
+ counts.add(hits);
+ }
+ assertEquals("All nodes should have a consistent number of documents", 1, counts.size());
}
}
@@ -940,4 +942,15 @@ private void refresh() throws IOException {
logger.debug("Refreshing [{}]", index);
client().performRequest("POST", "/" + index + "/_refresh");
}
+
+ private List dataNodes(String index, RestClient client) throws IOException {
+ Response response = client.performRequest("GET", index + "/_stats", singletonMap("level", "shards"));
+ List nodes = new ArrayList<>();
+ List