From bef3180146ff25aa01b473cb667dc1ffc6ecb66d Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 7 Oct 2017 13:40:18 -0400 Subject: [PATCH 01/10] Enable BWC testing against other remotes This commit enables BWC testing against remotes on GitHub other than elastic/elasticsearch.git. Relates #26918 --- TESTING.asciidoc | 24 +++++++++++++----------- distribution/bwc/build.gradle | 22 ++++++++++++---------- 2 files changed, 25 insertions(+), 21 deletions(-) diff --git a/TESTING.asciidoc b/TESTING.asciidoc index d0e1e0f50528c..9f64d1dd0afb8 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -472,28 +472,30 @@ is tested depends on the branch. On master, this will test against the current stable branch. On the stable branch, it will test against the latest release branch. Finally, on a release branch, it will test against the most recent release. -=== BWC Testing against a specific branch +=== BWC Testing against a specific remote/branch Sometimes a backward compatibility change spans two versions. A common case is new functionality that needs a BWC bridge in an unreleased version of a release branch (for example, 5.x). -To test the changes, you can instruct gradle to build the BWC version from a local branch instead of -pulling the release branch from GitHub. You do so using the `tests.bwc.refspec` system property: +To test the changes, you can instruct gradle to build the BWC version from another remote/branch combination instead of +pulling the release branch from GitHub. You do so using the `tests.bwc.remote` and `tests.bwc.refspec` system properties: ------------------------------------------------- -gradle check -Dtests.bwc.refspec=origin/index_req_bwc_5.x +gradle check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec=index_req_bwc_5.x ------------------------------------------------- The branch needs to be available on the local clone that the BWC makes of the repository you run the -tests from. Using the `origin` remote is a handy trick to make sure that a branch is available -and is up to date in the case of multiple runs. +The branch needs to be available on the remote used by the BWC checkout of the +repository you run the tests from. Pushing to that remote is a handy trick to make +sure that a branch is available and up to date across multiple runs. Example: -Say you need to make a change to `master` and have a BWC layer in `5.x`. You will need to: . Create a branch called `index_req_change` off `master`. This will contain your change. +Say you need to make a change to `master` and have a BWC layer in `5.x`. You +will need to: . Create a branch called `index_req_change` off `master` in your remote `${remote}`. This +will contain your change. . Create a branch called `index_req_bwc_5.x` off `5.x`. This will contain your bwc layer. -. If not running the tests locally, push both branches to your remote repository. -. Run the tests with `gradle check -Dtests.bwc.refspec=origin/index_req_bwc_5.x` +. Push both branches to your remote repository. +. Run the tests with `gradle check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec=index_req_bwc_5.x`.
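For example, the complete flow can be sketched as follows. This is an illustration only: it assumes `${remote}` holds the GitHub account or organization that owns your fork (the BWC build adds it as `https://github.com/${remote}/elasticsearch.git`) and that your local clone has a remote of the same name pointing at that fork.

-------------------------------------------------
# push the change and the BWC layer to your fork (local remote name assumed to match ${remote})
git push ${remote} index_req_change
git push ${remote} index_req_bwc_5.x
# run the BWC tests against that remote/branch combination
gradle check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec=index_req_bwc_5.x
-------------------------------------------------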
== Coverage analysis diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index f2c4a09edd4d8..c84c6a6d5a227 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -63,42 +63,44 @@ if (enabled) { } File checkoutDir = file("${buildDir}/bwc/checkout-${bwcBranch}") + final String remote = System.getProperty("tests.bwc.remote", "elastic") + task createClone(type: LoggedExec) { onlyIf { checkoutDir.exists() == false } commandLine = ['git', 'clone', rootDir, checkoutDir] } - task findUpstream(type: LoggedExec) { + task findRemote(type: LoggedExec) { dependsOn createClone workingDir = checkoutDir commandLine = ['git', 'remote', '-v'] doLast { - project.ext.upstreamExists = false + project.ext.remoteExists = false output.toString('UTF-8').eachLine { - if (it.contains("upstream")) { - project.ext.upstreamExists = true + if (it.contains("${remote}\thttps://github.com/${remote}/elasticsearch.git")) { + project.ext.remoteExists = true } } } } - task addUpstream(type: LoggedExec) { - dependsOn findUpstream - onlyIf { project.ext.upstreamExists == false } + task addRemote(type: LoggedExec) { + dependsOn findRemote + onlyIf { project.ext.remoteExists == false } workingDir = checkoutDir - commandLine = ['git', 'remote', 'add', 'upstream', 'https://github.com/elastic/elasticsearch.git'] + commandLine = ['git', 'remote', 'add', "${remote}", "https://github.com/${remote}/elasticsearch.git"] } task fetchLatest(type: LoggedExec) { onlyIf { project.gradle.startParameter.isOffline() == false } - dependsOn addUpstream + dependsOn addRemote workingDir = checkoutDir commandLine = ['git', 'fetch', '--all'] } String buildMetadataKey = "bwc_refspec_${project.path.substring(1)}" task checkoutBwcBranch(type: LoggedExec) { - String refspec = System.getProperty("tests.bwc.refspec", buildMetadata.get(buildMetadataKey, "upstream/${bwcBranch}")) + String refspec = System.getProperty("tests.bwc.refspec", buildMetadata.get(buildMetadataKey, "${remote}/${bwcBranch}")) dependsOn fetchLatest workingDir = checkoutDir commandLine = ['git', 'checkout', refspec] From c342cdeab5ba196831b111c4d78cc308598361f2 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Sat, 7 Oct 2017 23:25:26 +0200 Subject: [PATCH 02/10] Setup debug logging for qa.full-cluster-restart --- qa/full-cluster-restart/build.gradle | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle index 95e62416cade3..f271dae5cfda1 100644 --- a/qa/full-cluster-restart/build.gradle +++ b/qa/full-cluster-restart/build.gradle @@ -52,6 +52,9 @@ for (Version version : indexCompatVersions) { // some tests rely on the translog not being flushed setting 'indices.memory.shard_inactive_time', '20m' + // debug logging for testRecovery + setting 'logger.level', 'DEBUG' + if (version.onOrAfter('5.3.0')) { setting 'http.content_type.required', 'true' } @@ -72,6 +75,9 @@ for (Version version : indexCompatVersions) { // some tests rely on the translog not being flushed setting 'indices.memory.shard_inactive_time', '20m' + // debug logging for testRecovery + setting 'logger.level', 'DEBUG' + numNodes = 2 dataDir = { nodeNum -> oldClusterTest.nodes[nodeNum].dataDir } cleanShared = false // We want to keep snapshots made by the old cluster! 
@@ -81,6 +87,7 @@ for (Version version : indexCompatVersions) { systemProperty 'tests.is_old_cluster', 'false' systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT") systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo") + } Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") { From 6825cafaa6ec04b70e69148a9dea785826b0d5d0 Mon Sep 17 00:00:00 2001 From: Karel Minarik Date: Thu, 5 Oct 2017 18:28:33 +0200 Subject: [PATCH 03/10] [API] Added the `terminate_after` parameter to the REST spec for "Count" API Closes #26895 --- rest-api-spec/src/main/resources/rest-api-spec/api/count.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json index 0e2697cd524d2..1275983ef238f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json @@ -67,6 +67,10 @@ "lenient": { "type" : "boolean", "description" : "Specify whether format-based query failures (such as providing text to a numeric field) should be ignored" + }, + "terminate_after" : { + "type" : "number", + "description" : "The maximum count for each shard, upon reaching which the query execution will terminate early" } } }, From 9db21cd23fef561bb290cb8e4dfeefe16ba97d6d Mon Sep 17 00:00:00 2001 From: shaulzorea Date: Sun, 8 Oct 2017 16:12:08 +0300 Subject: [PATCH 04/10] fixing typo in datehistogram-aggregation.asciidoc (#26924) --- .../aggregations/bucket/datehistogram-aggregation.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index 11953cce537a2..ea72e07e337b8 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -402,7 +402,7 @@ deprecated[6.0.0, Use `_key` instead of `_time` to order buckets by their dates/ There are some cases where date histogram can't help us, like for example, when we need to aggregate the results by day of the week. -In this case to overcame the problem, we can use a script that returns the day of the week: +In this case to overcome the problem, we can use a script that returns the day of the week: [source,js] From bf4c3642b2d675217fb10145d44ff5caf6f879f4 Mon Sep 17 00:00:00 2001 From: Nhat Date: Sun, 8 Oct 2017 11:03:06 -0400 Subject: [PATCH 05/10] remove _primary and _replica shard preferences (#26791) The shard preferences _primary, _replica and their variants were useful for asynchronous replication. However, with the current implementation, they are no longer useful and should be removed.
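In code, the migration this implies for Java client callers can be sketched roughly as follows. This is not part of the patch: `client` is an existing transport client, and `node-1`/`node-2` are placeholder node names or ids.

[source,java]
--------------------------------------------------
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;

public class PreferenceMigrationSketch {
    // Replacement for searches that used to pin a request to primaries or replicas.
    public static SearchResponse searchOnSpecificCopies(Client client) {
        // Before: .setPreference("_primary"), "_replica", "_primary_first" or "_replica_first";
        // those values are rejected once this change lands.
        return client.prepareSearch("test")
                .setPreference("_only_nodes:node-1,node-2") // or "_prefer_nodes:node-1" to allow fallback
                .setQuery(QueryBuilders.matchAllQuery())
                .get();
    }
}
--------------------------------------------------

Targeting nodes rather than shard roles matches how the patch itself rewrites the affected tests, for example the `_only_nodes:` preferences now used in `TruncateTranslogIT` and `FullClusterRestartIT`.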
Closes #26335 --- .../search/NoopSearchRequestBuilder.java | 4 +- .../shards/ClusterSearchShardsRequest.java | 4 +- .../ClusterSearchShardsRequestBuilder.java | 4 +- .../elasticsearch/action/get/GetRequest.java | 4 +- .../action/get/GetRequestBuilder.java | 4 +- .../action/get/MultiGetRequest.java | 4 +- .../action/get/MultiGetRequestBuilder.java | 4 +- .../action/get/MultiGetShardRequest.java | 4 +- .../action/search/SearchRequest.java | 4 +- .../action/search/SearchRequestBuilder.java | 4 +- .../MultiTermVectorsShardRequest.java | 4 +- .../termvectors/TermVectorsRequest.java | 3 +- .../TermVectorsRequestBuilder.java | 4 +- .../routing/IndexShardRoutingTable.java | 68 --------------- .../cluster/routing/OperationRouting.java | 8 -- .../cluster/routing/Preference.java | 30 ------- .../structure/RoutingIteratorTests.java | 84 ++++--------------- .../index/translog/TruncateTranslogIT.java | 10 ++- .../indices/state/RareClusterStateIT.java | 3 +- .../basic/SearchWhileCreatingIndexIT.java | 26 ++++-- .../fetch/subphase/MatchedQueriesIT.java | 1 - .../functionscore/RandomScoreFunctionIT.java | 2 +- .../search/preference/SearchPreferenceIT.java | 47 +---------- .../search/profile/query/QueryProfilerIT.java | 2 - .../search/simple/SimpleSearchIT.java | 2 +- docs/reference/docs/get.asciidoc | 4 - docs/reference/docs/index_.asciidoc | 5 +- .../migration/migrate_7_0/cluster.asciidoc | 5 +- .../search/request/preference.asciidoc | 17 +--- .../upgrades/FullClusterRestartIT.java | 35 +++++--- .../elasticsearch/backwards/IndexingIT.java | 10 +-- .../test/bulk/20_list_of_strings.yml | 2 - .../test/client/RandomizingClient.java | 3 +- 33 files changed, 112 insertions(+), 303 deletions(-) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java index 5143bdd870594..529182aa98f7d 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java @@ -142,8 +142,8 @@ public NoopSearchRequestBuilder setRouting(String... routing) { /** * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. */ public NoopSearchRequestBuilder setPreference(String preference) { request.preference(preference); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java index d8dfd71530922..d127829fa3584 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java @@ -146,8 +146,8 @@ public ClusterSearchShardsRequest routing(String... routings) { /** * Sets the preference to execute the search. Defaults to randomize across shards. 
Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. */ public ClusterSearchShardsRequest preference(String preference) { this.preference = preference; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java index 7cb7ac1254c60..da31a79fc9bf0 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java @@ -55,8 +55,8 @@ public ClusterSearchShardsRequestBuilder setRouting(String... routing) { /** * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. */ public ClusterSearchShardsRequestBuilder setPreference(String preference) { request.preference(preference); diff --git a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java index 93045182f4c20..ea5dda45279e6 100644 --- a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java @@ -152,8 +152,8 @@ public GetRequest routing(String routing) { /** * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. */ public GetRequest preference(String preference) { this.preference = preference; diff --git a/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java index 973b130bedbd2..1ca8dbde65200 100644 --- a/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java @@ -76,8 +76,8 @@ public GetRequestBuilder setRouting(String routing) { /** * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. 
*/ public GetRequestBuilder setPreference(String preference) { request.preference(preference); diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java index 20a619cec2c70..420e0b448b052 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java @@ -284,8 +284,8 @@ public ActionRequestValidationException validate() { /** * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. */ public MultiGetRequest preference(String preference) { this.preference = preference; diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java index a2cb204d5eabf..fd7a6ac88253e 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java @@ -58,8 +58,8 @@ public MultiGetRequestBuilder add(MultiGetRequest.Item item) { /** * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. */ public MultiGetRequestBuilder setPreference(String preference) { request.preference(preference); diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java index 25a624b2eb558..fea3cd1043c62 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java @@ -64,8 +64,8 @@ public int shardId() { /** * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. */ public MultiGetShardRequest preference(String preference) { this.preference = preference; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 030d19d8b6879..7bfa317c72c70 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -241,8 +241,8 @@ public SearchRequest routing(String... routings) { /** * Sets the preference to execute the search. Defaults to randomize across shards. 
Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. */ public SearchRequest preference(String preference) { this.preference = preference; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 41e5babb64635..922e9be5fd75d 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -144,8 +144,8 @@ public SearchRequestBuilder setRouting(String... routing) { /** * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. */ public SearchRequestBuilder setPreference(String preference) { request.preference(preference); diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java index 6356c554991e6..8fdb6398ddccf 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java @@ -59,8 +59,8 @@ public int shardId() { /** * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. */ public MultiTermVectorsShardRequest preference(String preference) { this.preference = preference; diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index 0fe83e214463a..1886a8c2661ed 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -294,8 +294,7 @@ public String preference() { /** * Sets the preference to execute the search. Defaults to randomize across - * shards. Can be set to _local to prefer local shards, - * _primary to execute only on primary shards, or a custom value, + * shards. Can be set to _local to prefer local shards or a custom value, * which guarantees that the same order will be used across different * requests. 
*/ diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java index 9aa3ebca759c3..47bd09b100857 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java @@ -99,8 +99,8 @@ public TermVectorsRequestBuilder setParent(String parent) { /** * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to - * _local to prefer local shards, _primary to execute only on primary shards, or - * a custom value, which guarantees that the same order will be used across different requests. + * _local to prefer local shards or a custom value, which guarantees that the same order + * will be used across different requests. */ public TermVectorsRequestBuilder setPreference(String preference) { request.preference(preference); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index f8d42b3d8f5a0..a2d015a0dd13f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -441,74 +441,6 @@ public ShardIterator primaryShardIt() { return new PlainShardIterator(shardId, primaryAsList); } - public ShardIterator primaryActiveInitializingShardIt() { - if (noPrimariesActive()) { - return new PlainShardIterator(shardId, NO_SHARDS); - } - return primaryShardIt(); - } - - public ShardIterator primaryFirstActiveInitializingShardsIt() { - ArrayList ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size()); - // fill it in a randomized fashion - for (ShardRouting shardRouting : shuffler.shuffle(activeShards)) { - ordered.add(shardRouting); - if (shardRouting.primary()) { - // switch, its the matching node id - ordered.set(ordered.size() - 1, ordered.get(0)); - ordered.set(0, shardRouting); - } - } - // no need to worry about primary first here..., its temporal - if (!allInitializingShards.isEmpty()) { - ordered.addAll(allInitializingShards); - } - return new PlainShardIterator(shardId, ordered); - } - - public ShardIterator replicaActiveInitializingShardIt() { - // If the primaries are unassigned, return an empty list (there aren't - // any replicas to query anyway) - if (noPrimariesActive()) { - return new PlainShardIterator(shardId, NO_SHARDS); - } - - LinkedList ordered = new LinkedList<>(); - for (ShardRouting replica : shuffler.shuffle(replicas)) { - if (replica.active()) { - ordered.addFirst(replica); - } else if (replica.initializing()) { - ordered.addLast(replica); - } - } - return new PlainShardIterator(shardId, ordered); - } - - public ShardIterator replicaFirstActiveInitializingShardsIt() { - // If the primaries are unassigned, return an empty list (there aren't - // any replicas to query anyway) - if (noPrimariesActive()) { - return new PlainShardIterator(shardId, NO_SHARDS); - } - - ArrayList ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size()); - // fill it in a randomized fashion with the active replicas - for (ShardRouting replica : shuffler.shuffle(replicas)) { - if (replica.active()) { - ordered.add(replica); - } - } - - // Add the primary shard - ordered.add(primary); - - // Add initializing shards last - if 
(!allInitializingShards.isEmpty()) { - ordered.addAll(allInitializingShards); - } - return new PlainShardIterator(shardId, ordered); - } - public ShardIterator onlyNodeActiveInitializingShardsIt(String nodeId) { ArrayList ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size()); int seed = shuffler.nextSeed(); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index 296eca476a6c5..87adb55704a25 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -198,14 +198,6 @@ private ShardIterator preferenceActiveShardIterator(IndexShardRoutingTable index return indexShard.preferNodeActiveInitializingShardsIt(nodesIds); case LOCAL: return indexShard.preferNodeActiveInitializingShardsIt(Collections.singleton(localNodeId)); - case PRIMARY: - return indexShard.primaryActiveInitializingShardIt(); - case REPLICA: - return indexShard.replicaActiveInitializingShardIt(); - case PRIMARY_FIRST: - return indexShard.primaryFirstActiveInitializingShardsIt(); - case REPLICA_FIRST: - return indexShard.replicaFirstActiveInitializingShardsIt(); case ONLY_LOCAL: return indexShard.onlyNodeActiveInitializingShardsIt(localNodeId); case ONLY_NODES: diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/Preference.java b/core/src/main/java/org/elasticsearch/cluster/routing/Preference.java index d4685d7aeadc1..9a55a99a51ca8 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/Preference.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/Preference.java @@ -39,26 +39,6 @@ public enum Preference { */ LOCAL("_local"), - /** - * Route to primary shards - */ - PRIMARY("_primary"), - - /** - * Route to replica shards - */ - REPLICA("_replica"), - - /** - * Route to primary shards first - */ - PRIMARY_FIRST("_primary_first"), - - /** - * Route to replica shards first - */ - REPLICA_FIRST("_replica_first"), - /** * Route to the local shard only */ @@ -97,16 +77,6 @@ public static Preference parse(String preference) { return PREFER_NODES; case "_local": return LOCAL; - case "_primary": - return PRIMARY; - case "_replica": - return REPLICA; - case "_primary_first": - case "_primaryFirst": - return PRIMARY_FIRST; - case "_replica_first": - case "_replicaFirst": - return REPLICA_FIRST; case "_only_local": case "_onlyLocal": return ONLY_LOCAL; diff --git a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java index 172bcd6bd558b..6fd11aa91dce6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java @@ -50,6 +50,7 @@ import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -415,10 +416,6 @@ public void testShardsAndPreferNodeRouting() { } public void testReplicaShardPreferenceIters() throws Exception { - AllocationService strategy = createAllocationService(Settings.builder() - 
.put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .build()); - OperationRouting operationRouting = new OperationRouting(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); @@ -430,69 +427,22 @@ public void testReplicaShardPreferenceIters() throws Exception { .addAsNew(metaData.index("test")) .build(); - ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTable).build(); - - clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() - .add(newNode("node1")) - .add(newNode("node2")) - .add(newNode("node3")) - .localNodeId("node1") - ).build(); - clusterState = strategy.reroute(clusterState, "reroute"); - - clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); - - // When replicas haven't initialized, it comes back with the primary first, then initializing replicas - GroupShardsIterator shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica_first"); - assertThat(shardIterators.size(), equalTo(2)); // two potential shards - ShardIterator iter = shardIterators.iterator().next(); - assertThat(iter.size(), equalTo(3)); // three potential candidates for the shard - ShardRouting routing = iter.nextOrNull(); - assertNotNull(routing); - assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1))); - assertTrue(routing.primary()); // replicas haven't initialized yet, so primary is first - assertTrue(routing.started()); - routing = iter.nextOrNull(); - assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1))); - assertFalse(routing.primary()); - assertTrue(routing.initializing()); - routing = iter.nextOrNull(); - assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1))); - assertFalse(routing.primary()); - assertTrue(routing.initializing()); - - clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); - - clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); - - - shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica"); - assertThat(shardIterators.size(), equalTo(2)); // two potential shards - iter = shardIterators.iterator().next(); - assertThat(iter.size(), equalTo(2)); // two potential replicas for the shard - routing = iter.nextOrNull(); - assertNotNull(routing); - assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1))); - assertFalse(routing.primary()); - routing = iter.nextOrNull(); - assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1))); - assertFalse(routing.primary()); - - shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica_first"); - assertThat(shardIterators.size(), equalTo(2)); // two potential shards - iter = shardIterators.iterator().next(); - assertThat(iter.size(), equalTo(3)); // three potential candidates for the shard - routing = iter.nextOrNull(); - assertNotNull(routing); - assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1))); - assertFalse(routing.primary()); - routing = iter.nextOrNull(); - assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1))); - assertFalse(routing.primary()); - // finally the primary - routing = iter.nextOrNull(); - assertThat(routing.shardId().id(), anyOf(equalTo(0), equalTo(1))); - 
assertTrue(routing.primary()); + final ClusterState clusterState = ClusterState + .builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metaData(metaData) + .routingTable(routingTable) + .nodes(DiscoveryNodes.builder() + .add(newNode("node1")) + .add(newNode("node2")) + .add(newNode("node3")) + .localNodeId("node1")) + .build(); + + String[] removedPreferences = {"_primary", "_primary_first", "_replica", "_replica_first"}; + for (String pref : removedPreferences) { + expectThrows(IllegalArgumentException.class, + () -> operationRouting.searchShards(clusterState, new String[]{"test"}, null, pref)); + } } } diff --git a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java index c2b394b219a20..b0d4c238679e8 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; @@ -210,7 +211,10 @@ public void testCorruptTranslogTruncation() throws Exception { logger.info("--> starting the replica node to test recovery"); internalCluster().startNode(); ensureGreen("test"); - assertHitCount(client().prepareSearch("test").setPreference("_replica").setQuery(matchAllQuery()).get(), numDocsToKeep); + for (String node : internalCluster().nodesInclude("test")) { + SearchRequestBuilder q = client().prepareSearch("test").setPreference("_only_nodes:" + node).setQuery(matchAllQuery()); + assertHitCount(q.get(), numDocsToKeep); + } final RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("test").setActiveOnly(false).get(); final RecoveryState replicaRecoveryState = recoveryResponse.shardRecoveryStates().get("test").stream() .filter(recoveryState -> recoveryState.getPrimary() == false).findFirst().get(); @@ -308,7 +312,9 @@ public void testCorruptTranslogTruncationOfReplica() throws Exception { logger.info("--> starting the replica node to test recovery"); internalCluster().startNode(); ensureGreen("test"); - assertHitCount(client().prepareSearch("test").setPreference("_replica").setQuery(matchAllQuery()).get(), totalDocs); + for (String node : internalCluster().nodesInclude("test")) { + assertHitCount(client().prepareSearch("test").setPreference("_only_nodes:" + node).setQuery(matchAllQuery()).get(), totalDocs); + } final RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("test").setActiveOnly(false).get(); final RecoveryState replicaRecoveryState = recoveryResponse.shardRecoveryStates().get("test").stream() diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index 185f27d39c8fe..bf213b51475fb 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -406,8 +406,7 @@ public void onFailure(Exception e) { } }); - // Wait for document to be indexed on primary - assertBusy(() -> 
assertTrue(client().prepareGet("index", "type", "1").setPreference("_primary").get().isExists())); + assertBusy(() -> assertTrue(client().prepareGet("index", "type", "1").get().isExists())); // The mappings have not been propagated to the replica yet as a consequence the document count not be indexed // We wait on purpose to make sure that the document is not indexed because the shard operation is stalled diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 1ed396672b5d0..1ff2d6922f98b 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -73,13 +74,21 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) logger.info("using preference {}", preference); // we want to make sure that while recovery happens, and a replica gets recovered, its properly refreshed - ClusterHealthStatus status = client().admin().cluster().prepareHealth("test").get().getStatus();; + ClusterHealthStatus status = client().admin().cluster().prepareHealth("test").get().getStatus(); + while (status != ClusterHealthStatus.GREEN) { // first, verify that search on the primary search works - SearchResponse searchResponse = client().prepareSearch("test").setPreference("_primary").setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); - assertHitCount(searchResponse, 1); + for (IndexShardRoutingTable shardRoutingTable : clusterService().state().routingTable().index("test")) { + String primaryNode = shardRoutingTable.primaryShard().currentNodeId(); + SearchResponse searchResponse = client().prepareSearch("test") + .setPreference("_only_nodes:" + primaryNode) + .setQuery(QueryBuilders.termQuery("field", "test")) + .execute().actionGet(); + assertHitCount(searchResponse, 1); + break; + } Client client = client(); - searchResponse = client.prepareSearch("test").setPreference(preference + Integer.toString(counter++)).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); + SearchResponse searchResponse = client.prepareSearch("test").setPreference(preference + Integer.toString(counter++)).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); if (searchResponse.getHits().getTotalHits() != 1) { refresh(); SearchResponse searchResponseAfterRefresh = client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); @@ -93,8 +102,13 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) status = client().admin().cluster().prepareHealth("test").get().getStatus(); internalCluster().ensureAtLeastNumDataNodes(numberOfReplicas + 1); } - SearchResponse searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); - assertHitCount(searchResponse, 1); + + for (String node : internalCluster().nodesInclude("test")) { + SearchResponse searchResponse = 
client().prepareSearch("test") + .setPreference("_prefer_nodes:" + node) + .setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet(); + assertHitCount(searchResponse, 1); + } cluster().wipeIndices("test"); } } diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java index 763518804e277..761f9798d7286 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java @@ -301,7 +301,6 @@ public void testMatchedWithShould() throws Exception { .should(queryStringQuery("dolor").queryName("dolor")) .should(queryStringQuery("elit").queryName("elit")) ) - .setPreference("_primary") .get(); assertHitCount(searchResponse, 2L); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index 31366c2534cb2..257089c90545f 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -107,7 +107,7 @@ public void testConsistentHitsWithSameSeed() throws Exception { for (int o = 0; o < outerIters; o++) { final int seed = randomInt(); String preference = randomRealisticUnicodeOfLengthBetween(1, 10); // at least one char!! - // randomPreference should not start with '_' (reserved for known preference types (e.g. _shards, _primary) + // randomPreference should not start with '_' (reserved for known preference types (e.g. _shards) while (preference.startsWith("_")) { preference = randomRealisticUnicodeOfLengthBetween(1, 10); } diff --git a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java index 6478446a1a254..8cbb626b6770e 100644 --- a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java +++ b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java @@ -44,7 +44,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -67,7 +66,7 @@ public void testStopOneNodePreferenceWithRedState() throws InterruptedException, refresh(); internalCluster().stopRandomDataNode(); client().admin().cluster().prepareHealth().setWaitForStatus(ClusterHealthStatus.RED).execute().actionGet(); - String[] preferences = new String[] {"_primary", "_local", "_primary_first", "_prefer_nodes:somenode", "_prefer_nodes:server2", "_prefer_nodes:somenode,server2"}; + String[] preferences = new String[]{"_local", "_prefer_nodes:somenode", "_prefer_nodes:server2", "_prefer_nodes:somenode,server2"}; for (String pref : preferences) { logger.info("--> Testing out preference={}", pref); SearchResponse searchResponse = client().prepareSearch().setSize(0).setPreference(pref).execute().actionGet(); @@ -113,54 +112,14 @@ public void testSimplePreference() throws Exception { client().prepareIndex("test", "type1").setSource("field1", "value1").execute().actionGet(); refresh(); - SearchResponse searchResponse = 
client().prepareSearch().setQuery(matchAllQuery()).setPreference("_local").execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_local").execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); - - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_primary").execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_primary").execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); - - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica_first").execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica_first").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_local").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("1234").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("1234").execute().actionGet(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); - } - - public void testReplicaPreference() throws Exception { - client().admin().indices().prepareCreate("test").setSettings("{\"number_of_replicas\": 0}", XContentType.JSON).get(); - ensureGreen(); - - client().prepareIndex("test", "type1").setSource("field1", "value1").execute().actionGet(); - refresh(); - - try { - client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet(); - fail("should have failed because there are no replicas"); - } catch (Exception e) { - // pass - } - - SearchResponse resp = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica_first").execute().actionGet(); - assertThat(resp.getHits().getTotalHits(), equalTo(1L)); - - client().admin().indices().prepareUpdateSettings("test").setSettings("{\"number_of_replicas\": 1}", XContentType.JSON).get(); - ensureGreen("test"); - - resp = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet(); - assertThat(resp.getHits().getTotalHits(), equalTo(1L)); } public void testThatSpecifyingNonExistingNodesReturnsUsefulError() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index d5198485351b1..14378fdb1c8a9 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -134,14 +134,12 @@ public void testProfileMatchesRegular() throws Exception { .setQuery(q) .setProfile(false) .addSort("_id", SortOrder.ASC) - .setPreference("_primary") .setSearchType(SearchType.QUERY_THEN_FETCH); SearchRequestBuilder profile = client().prepareSearch("test") .setQuery(q) .setProfile(true) .addSort("_id", SortOrder.ASC) - .setPreference("_primary") .setSearchType(SearchType.QUERY_THEN_FETCH); MultiSearchResponse.Item[] responses = client().prepareMultiSearch() diff --git a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java index 88c403a1d7fb5..9eacb0e81bd29 100644 --- a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -79,7 +79,7 @@ public void testSearchRandomPreference() throws InterruptedException, ExecutionE int iters = scaledRandomIntBetween(10, 20); for (int i = 0; i < iters; i++) { String randomPreference = randomUnicodeOfLengthBetween(0, 4); - // randomPreference should not start with '_' (reserved for known preference types (e.g. _shards, _primary) + // randomPreference should not start with '_' (reserved for known preference types (e.g. _shards) while (randomPreference.startsWith("_")) { randomPreference = randomUnicodeOfLengthBetween(0, 4); } diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index 2a252595dd59a..11b2347e7f31b 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -275,10 +275,6 @@ replicas. The `preference` can be set to: -`_primary`:: - The operation will go and be executed only on the primary - shards. - `_local`:: The operation will prefer to be executed on a local allocated shard if possible. diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc index 8e18f3034e82b..7875f011abee1 100644 --- a/docs/reference/docs/index_.asciidoc +++ b/docs/reference/docs/index_.asciidoc @@ -91,8 +91,7 @@ will control the version of the document the operation is intended to be executed against. A good example of a use case for versioning is performing a transactional read-then-update. Specifying a `version` from the document initially read ensures no changes have happened in the -meantime (when reading in order to update, it is recommended to set -`preference` to `_primary`). For example: +meantime. For example: [source,js] -------------------------------------------------- @@ -242,7 +241,7 @@ The result of the above index operation is: [[index-routing]] === Routing -By default, shard placement — or `routing` — is controlled by using a +By default, shard placement — or `routing` — is controlled by using a hash of the document's id value. For more explicit control, the value fed into the hash function used by the router can be directly specified on a per-operation basis using the `routing` parameter. For example: diff --git a/docs/reference/migration/migrate_7_0/cluster.asciidoc b/docs/reference/migration/migrate_7_0/cluster.asciidoc index 12e6916e00145..e9584074d73d2 100644 --- a/docs/reference/migration/migrate_7_0/cluster.asciidoc +++ b/docs/reference/migration/migrate_7_0/cluster.asciidoc @@ -6,8 +6,11 @@ Due to cross-cluster search using `:` to separate a cluster and index name, cluster names may no longer contain `:`.
-==== new default for `wait_for_active_shards` parameter of the open index command +==== New default for `wait_for_active_shards` parameter of the open index command The default value for the `wait_for_active_shards` parameter of the open index API is changed from 0 to 1, which means that the command will now by default wait for all primary shards of the opened index to be allocated. + +==== Shard preferences `_primary`, `_primary_first`, `_replica`, and `_replica_first` are removed +These shard preferences are removed in favour of the `_prefer_nodes` and `_only_nodes` preferences. diff --git a/docs/reference/search/request/preference.asciidoc b/docs/reference/search/request/preference.asciidoc index d0f60d700a82c..dbd9055ff8c86 100644 --- a/docs/reference/search/request/preference.asciidoc +++ b/docs/reference/search/request/preference.asciidoc @@ -7,21 +7,6 @@ search. By default, the operation is randomized among the available shard copies The `preference` is a query string parameter which can be set to: [horizontal] -`_primary`:: - The operation will go and be executed only on the primary - shards. - -`_primary_first`:: - The operation will go and be executed on the primary - shard, and if not available (failover), will execute on other shards. - -`_replica`:: - The operation will go and be executed only on a replica shard. - -`_replica_first`:: - The operation will go and be executed only on a replica shard, and if - not available (failover), will execute on other shards. - `_local`:: The operation will prefer to be executed on a local allocated shard if possible. @@ -33,7 +18,7 @@ The `preference` is a query string parameter which can be set to: `_shards:2,3`:: Restricts the operation to the specified shards. (`2` and `3` in this case). This preference can be combined with other - preferences but it has to appear first: `_shards:2,3|_primary` + preferences but it has to appear first: `_shards:2,3|_local` `_only_nodes`:: Restricts the operation to nodes specified in <> diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index c7e708418c92c..22859859f2521 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -25,6 +25,7 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -37,12 +38,15 @@ import org.junit.Before; import java.io.IOException; +import java.util.ArrayList; import java.util.Base64; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -227,17 +231,15 @@ public void testNewReplicasWork() throws Exception { Map recoverRsp = toMap(client().performRequest("GET", "/" + index + "/_recovery")); logger.debug("--> recovery status:\n{}", recoverRsp); - Map responseBody = toMap(client().performRequest("GET", "/" + index + "/_search", - Collections.singletonMap("preference", "_primary"))); - assertNoFailures(responseBody); - int foundHits1 = (int) 
XContentMapValues.extractValue("hits.total", responseBody); - - responseBody = toMap(client().performRequest("GET", "/" + index + "/_search", - Collections.singletonMap("preference", "_replica"))); - assertNoFailures(responseBody); - int foundHits2 = (int) XContentMapValues.extractValue("hits.total", responseBody); - assertEquals(foundHits1, foundHits2); - // TODO: do something more with the replicas! index? + Set counts = new HashSet<>(); + for (String node : dataNodes(index, client())) { + Map responseBody = toMap(client().performRequest("GET", "/" + index + "/_search", + Collections.singletonMap("preference", "_only_nodes:" + node))); + assertNoFailures(responseBody); + int hits = (int) XContentMapValues.extractValue("hits.total", responseBody); + counts.add(hits); + } + assertEquals("All nodes should have a consistent number of documents", 1, counts.size()); } } @@ -940,4 +942,15 @@ private void refresh() throws IOException { logger.debug("Refreshing [{}]", index); client().performRequest("POST", "/" + index + "/_refresh"); } + + private List dataNodes(String index, RestClient client) throws IOException { + Response response = client.performRequest("GET", index + "/_stats", singletonMap("level", "shards")); + List nodes = new ArrayList<>(); + List shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0"); + for (Object shard : shardStats) { + final String nodeId = ObjectPath.evaluate(shard, "routing.node"); + nodes.add(nodeId); + } + return nodes; + } } diff --git a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java index b3ef9cb7dd02b..c6200417e39d8 100644 --- a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java +++ b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java @@ -171,9 +171,6 @@ public void testIndexVersionPropagation() throws Exception { assertVersion(index, 5, "_only_nodes:" + shard.getNode().getNodeName(), finalVersionForDoc5); assertCount(index, "_only_nodes:" + shard.getNode().getNodeName(), 5); } - // the number of documents on the primary and on the recovered replica should match the number of indexed documents - assertCount(index, "_primary", 5); - assertCount(index, "_replica", 5); } } @@ -232,9 +229,10 @@ public void testSeqNoCheckpoints() throws Exception { updateIndexSetting(index, Settings.builder().put("index.number_of_replicas", 1)); ensureGreen(); assertOK(client().performRequest("POST", index + "/_refresh")); - // the number of documents on the primary and on the recovered replica should match the number of indexed documents - assertCount(index, "_primary", numDocs); - assertCount(index, "_replica", numDocs); + + for (Shard shard : buildShards(index, nodes, newNodeClient)) { + assertCount(index, "_only_nodes:" + shard.node.nodeName, numDocs); + } assertSeqNoOnShards(index, nodes, numDocs, newNodeClient); } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml index e25626cf3ae28..def91f4280722 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml @@ -11,8 +11,6 @@ - do: count: - # we count through the primary in case there is a replica that has not yet fully recovered - preference: _primary index: test_index - match: {count: 2} diff 
--git a/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java index b144898d643d0..e1a6ba030fde8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.unit.TimeValue; import java.util.Arrays; -import java.util.EnumSet; import java.util.Random; import java.util.concurrent.TimeUnit; @@ -52,7 +51,7 @@ public RandomizingClient(Client client, Random random) { SearchType.DFS_QUERY_THEN_FETCH, SearchType.QUERY_THEN_FETCH)); if (random.nextInt(10) == 0) { - defaultPreference = RandomPicks.randomFrom(random, EnumSet.of(Preference.PRIMARY_FIRST, Preference.LOCAL)).type(); + defaultPreference = Preference.LOCAL.type(); } else if (random.nextInt(10) == 0) { String s = TestUtil.randomRealisticUnicodeString(random, 1, 10); defaultPreference = s.startsWith("_") ? null : s; // '_' is a reserved character From cdd7c1e6c26474721a1513c83ab3ca6473e9f9ef Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 9 Oct 2017 09:52:08 +0200 Subject: [PATCH 06/10] Return List instead of an array from settings (#26903) Today we return a `String[]` that requires copying values for every access. Yet, we already store the setting as a list so we can also directly return the unmodifiable list directly. This makes list / array access in settings a much cheaper operation especially if lists are large. --- .../decider/AwarenessAllocationDecider.java | 11 +- .../common/settings/Setting.java | 14 +- .../common/settings/Settings.java | 48 +++---- .../org/elasticsearch/env/Environment.java | 2 +- .../index/analysis/Analysis.java | 11 +- .../analysis/CustomAnalyzerProvider.java | 8 +- .../analysis/CustomNormalizerProvider.java | 8 +- .../analysis/EdgeNGramTokenizerFactory.java | 2 +- .../index/analysis/NGramTokenizerFactory.java | 7 +- .../analysis/SynonymTokenFilterFactory.java | 2 +- .../indices/TransportAnalyzeActionTests.java | 3 +- .../AbstractTermVectorsTestCase.java | 2 +- .../action/termvectors/GetTermVectorsIT.java | 6 +- .../termvectors/GetTermVectorsTests.java | 2 +- .../cluster/settings/ClusterSettingsIT.java | 8 +- .../common/settings/ScopedSettingsTests.java | 42 +++--- .../common/settings/SettingTests.java | 24 ++-- .../common/settings/SettingsTests.java | 129 +++++++++--------- .../discovery/ZenFaultDetectionTests.java | 3 +- .../discovery/zen/UnicastZenPingTests.java | 6 +- .../elasticsearch/env/EnvironmentTests.java | 4 +- .../env/NodeEnvironmentTests.java | 6 +- .../index/IndexSettingsTests.java | 2 +- .../org/elasticsearch/index/IndexSortIT.java | 10 +- .../index/IndexSortSettingsTests.java | 8 +- .../index/analysis/AnalysisRegistryTests.java | 8 +- .../index/analysis/AnalysisTests.java | 3 +- .../index/analysis/CustomNormalizerTests.java | 10 +- .../synonyms/SynonymsAnalysisTests.java | 12 +- .../index/mapper/KeywordFieldMapperTests.java | 4 +- .../query/QueryStringQueryBuilderTests.java | 4 +- .../query/SimpleQueryStringBuilderTests.java | 4 +- .../index/search/MatchQueryIT.java | 8 +- .../index/search/MultiMatchQueryTests.java | 4 +- .../index/shard/NewPathForShardTests.java | 3 +- .../index/store/FsDirectoryServiceTests.java | 4 +- .../indices/analyze/AnalyzeActionIT.java | 6 +- .../highlight/HighlighterSearchIT.java | 8 +- .../search/functionscore/QueryRescorerIT.java | 14 +- .../search/query/QueryStringIT.java | 6 +- 
.../search/query/SearchQueryIT.java | 12 +- .../suggest/CompletionSuggestSearchIT.java | 6 +- .../search/suggest/SuggestSearchIT.java | 29 ++-- .../SharedClusterSnapshotRestoreIT.java | 2 +- .../transport/RemoteClusterServiceTests.java | 16 +-- .../validate/SimpleValidateQueryIT.java | 4 +- .../common/CJKBigramFilterFactory.java | 5 +- .../common/HtmlStripCharFilterFactory.java | 5 +- .../common/KeepTypesFilterFactory.java | 6 +- .../common/KeepWordFilterFactory.java | 5 +- ...PatternCaptureGroupTokenFilterFactory.java | 9 +- .../CommonGramsTokenFilterFactoryTests.java | 18 +-- .../common/HighlighterWithAnalyzersTests.java | 8 +- .../common/KeepFilterFactoryTests.java | 2 +- .../common/KeepTypesFilterFactoryTests.java | 2 +- .../analysis/common/MassiveWordListTests.java | 4 +- .../common/NGramTokenizerFactoryTests.java | 2 +- .../index/analysis/IcuTokenizerFactory.java | 3 +- .../analysis/PhoneticTokenFilterFactory.java | 8 +- .../discovery/ec2/Ec2DiscoveryTests.java | 2 +- .../discovery/gce/GceDiscoveryTests.java | 18 +-- .../azure/AzureRepositorySettingsTests.java | 2 +- .../bootstrap/EvilSecurityTests.java | 4 +- .../env/NodeEnvironmentEvilTests.java | 6 +- .../org/elasticsearch/test/ESTestCase.java | 4 +- .../ClusterDiscoveryConfiguration.java | 2 +- .../AbstractSimpleTransportTestCase.java | 4 +- 67 files changed, 322 insertions(+), 332 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 4160fd224aa14..f00e9cdc3ce8f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.routing.allocation.decider; import java.util.HashMap; +import java.util.List; import java.util.Map; import com.carrotsearch.hppc.ObjectIntHashMap; @@ -85,7 +86,7 @@ public class AwarenessAllocationDecider extends AllocationDecider { private volatile String[] awarenessAttributes; - private volatile Map forcedAwarenessAttributes; + private volatile Map> forcedAwarenessAttributes; public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); @@ -97,11 +98,11 @@ public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSett } private void setForcedAwarenessAttributes(Settings forceSettings) { - Map forcedAwarenessAttributes = new HashMap<>(); + Map> forcedAwarenessAttributes = new HashMap<>(); Map forceGroups = forceSettings.getAsGroups(); for (Map.Entry entry : forceGroups.entrySet()) { - String[] aValues = entry.getValue().getAsArray("values"); - if (aValues.length > 0) { + List aValues = entry.getValue().getAsList("values"); + if (aValues.size() > 0) { forcedAwarenessAttributes.put(entry.getKey(), aValues); } } @@ -169,7 +170,7 @@ private Decision underCapacity(ShardRouting shardRouting, RoutingNode node, Rout } int numberOfAttributes = nodesPerAttribute.size(); - String[] fullValues = forcedAwarenessAttributes.get(awarenessAttribute); + List fullValues = forcedAwarenessAttributes.get(awarenessAttribute); if (fullValues != null) { for (String fullValue : fullValues) { if (!shardPerAttribute.containsKey(fullValue)) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java 
b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index ee6e422e82676..f35df27e3b338 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -804,14 +804,14 @@ private static class ListSetting extends Setting> { private ListSetting(String key, Function> defaultStringValue, Function> parser, Property... properties) { - super(new ListKey(key), (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, + super(new ListKey(key), (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s)), parser, properties); this.defaultStringValue = defaultStringValue; } @Override public String getRaw(Settings settings) { - String[] array = settings.getAsArray(getKey(), null); + List array = settings.getAsList(getKey(), null); return array == null ? defaultValue.apply(settings) : arrayToParsableString(array); } @@ -823,11 +823,11 @@ boolean hasComplexMatcher() { @Override public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) { if (exists(source) == false) { - String[] asArray = defaultSettings.getAsArray(getKey(), null); - if (asArray == null) { - builder.putArray(getKey(), defaultStringValue.apply(defaultSettings)); + List asList = defaultSettings.getAsList(getKey(), null); + if (asList == null) { + builder.putList(getKey(), defaultStringValue.apply(defaultSettings)); } else { - builder.putArray(getKey(), asArray); + builder.putList(getKey(), asList); } } } @@ -1087,7 +1087,7 @@ private static List parseableStringToList(String parsableString) { } } - private static String arrayToParsableString(String[] array) { + private static String arrayToParsableString(List array) { try { XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); builder.startArray(); diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index a1adef3338662..41acefdd8e879 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -366,48 +366,48 @@ public SizeValue getAsSize(String setting, SizeValue defaultValue) throws Settin } /** - * The values associated with a setting key as an array. + * The values associated with a setting key as an immutable list. *
<p>
* It will also automatically load a comma separated list under the settingPrefix and merge with * the numbered format. * - * @param key The setting prefix to load the array by - * @return The setting array values + * @param key The setting key to load the list by + * @return The setting list values */ - public String[] getAsArray(String key) throws SettingsException { - return getAsArray(key, Strings.EMPTY_ARRAY, true); + public List getAsList(String key) throws SettingsException { + return getAsList(key, Collections.emptyList()); } /** - * The values associated with a setting key as an array. + * The values associated with a setting key as an immutable list. *
<p>
* If commaDelimited is true, it will automatically load a comma separated list under the settingPrefix and merge with * the numbered format. * - * @param key The setting key to load the array by - * @return The setting array values + * @param key The setting key to load the list by + * @return The setting list values */ - public String[] getAsArray(String key, String[] defaultArray) throws SettingsException { - return getAsArray(key, defaultArray, true); + public List getAsList(String key, List defaultValue) throws SettingsException { + return getAsList(key, defaultValue, true); } /** - * The values associated with a setting key as an array. + * The values associated with a setting key as an immutable list. *
<p>
* It will also automatically load a comma separated list under the settingPrefix and merge with * the numbered format. * - * @param key The setting key to load the array by - * @param defaultArray The default array to use if no value is specified + * @param key The setting key to load the list by + * @param defaultValue The default value to use if no value is specified * @param commaDelimited Whether to try to parse a string as a comma-delimited value - * @return The setting array values + * @return The setting list values */ - public String[] getAsArray(String key, String[] defaultArray, Boolean commaDelimited) throws SettingsException { + public List getAsList(String key, List defaultValue, Boolean commaDelimited) throws SettingsException { List result = new ArrayList<>(); final Object valueFromPrefix = settings.get(key); if (valueFromPrefix != null) { if (valueFromPrefix instanceof List) { - result = ((List) valueFromPrefix); + return ((List) valueFromPrefix); // it's already unmodifiable since the builder puts it as a such } else if (commaDelimited) { String[] strings = Strings.splitStringByCommaToArray(get(key)); if (strings.length > 0) { @@ -421,9 +421,9 @@ public String[] getAsArray(String key, String[] defaultArray, Boolean commaDelim } if (result.isEmpty()) { - return defaultArray; + return defaultValue; } - return result.toArray(new String[result.size()]); + return Collections.unmodifiableList(result); } @@ -552,7 +552,7 @@ public static Settings readSettingsFromStream(StreamInput in) throws IOException if (value == null) { builder.putNull(key); } else if (value instanceof List) { - builder.putArray(key, (List) value); + builder.putList(key, (List) value); } else { builder.put(key, value.toString()); } @@ -679,7 +679,7 @@ private static void fromXContent(XContentParser parser, StringBuilder keyBuilder } String key = keyBuilder.toString(); validateValue(key, list, builder, parser, allowNullValues); - builder.putArray(key, list); + builder.putList(key, list); } else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { String key = keyBuilder.toString(); validateValue(key, null, builder, parser, allowNullValues); @@ -898,7 +898,7 @@ public Builder copy(String key, String sourceKey, Settings source) { } final Object value = source.settings.get(sourceKey); if (value instanceof List) { - return putArray(key, (List)value); + return putList(key, (List)value); } else if (value == null) { return putNull(key); } else { @@ -1022,8 +1022,8 @@ public Builder put(String setting, long value, ByteSizeUnit sizeUnit) { * @param values The values * @return The builder */ - public Builder putArray(String setting, String... values) { - return putArray(setting, Arrays.asList(values)); + public Builder putList(String setting, String... values) { + return putList(setting, Arrays.asList(values)); } /** @@ -1033,7 +1033,7 @@ public Builder putArray(String setting, String... 
values) { * @param values The values * @return The builder */ - public Builder putArray(String setting, List values) { + public Builder putList(String setting, List values) { remove(setting); map.put(setting, Collections.unmodifiableList(new ArrayList<>(values))); return this; diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java index 27fe23048d79c..31a67333a810f 100644 --- a/core/src/main/java/org/elasticsearch/env/Environment.java +++ b/core/src/main/java/org/elasticsearch/env/Environment.java @@ -153,7 +153,7 @@ public Environment(final Settings settings, final Path configPath) { Settings.Builder finalSettings = Settings.builder().put(settings); finalSettings.put(PATH_HOME_SETTING.getKey(), homeFile); if (PATH_DATA_SETTING.exists(settings)) { - finalSettings.putArray(PATH_DATA_SETTING.getKey(), dataPaths); + finalSettings.putList(PATH_DATA_SETTING.getKey(), dataPaths); } finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile.toString()); this.settings = finalSettings.build(); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java index c6f1bfe7f41d1..d736703f6418e 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -68,7 +68,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -105,10 +104,10 @@ public static CharArraySet parseStemExclusion(Settings settings, CharArraySet de if ("_none_".equals(value)) { return CharArraySet.EMPTY_SET; } - String[] stemExclusion = settings.getAsArray("stem_exclusion", null); + List stemExclusion = settings.getAsList("stem_exclusion", null); if (stemExclusion != null) { // LUCENE 4 UPGRADE: Should be settings.getAsBoolean("stem_exclusion_case", false)? 
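The rename from `getAsArray`/`putArray` to `getAsList`/`putList` is mechanical, but it may help to see both sides of the new API together. A minimal sketch of writing and reading a list-valued setting after this patch; the setting key is only an example, and the comma-separated form is still split by `getAsList` as before.

-------------------------------------------------
import org.elasticsearch.common.settings.Settings;

import java.util.Arrays;
import java.util.List;

public final class ListSettingExample {
    public static void main(String[] args) {
        // Values can be supplied as an explicit list ...
        Settings explicit = Settings.builder()
            .putList("index.sort.field", "date", "keyword_dv")   // was putArray(...)
            .build();

        // ... or as a comma-separated string, which getAsList still splits.
        Settings commaSeparated = Settings.builder()
            .put("index.sort.field", "date,keyword_dv")
            .build();

        // getAsList returns an unmodifiable List<String>; no per-call array copy.
        List<String> fromList = explicit.getAsList("index.sort.field");
        List<String> fromString = commaSeparated.getAsList("index.sort.field", Arrays.asList("fallback"));

        System.out.println(fromList);    // [date, keyword_dv]
        System.out.println(fromString);  // [date, keyword_dv]
    }
}
-------------------------------------------------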
- return new CharArraySet(Arrays.asList(stemExclusion), false); + return new CharArraySet(stemExclusion, false); } else { return defaultStemExclusion; } @@ -161,7 +160,7 @@ public static CharArraySet parseWords(Environment env, Settings settings, String if ("_none_".equals(value)) { return CharArraySet.EMPTY_SET; } else { - return resolveNamedWords(Arrays.asList(settings.getAsArray(name)), namedWords, ignoreCase); + return resolveNamedWords(settings.getAsList(name), namedWords, ignoreCase); } } List pathLoadedWords = getWordList(env, settings, name); @@ -225,11 +224,11 @@ public static List getWordList(Environment env, Settings settings, Strin String wordListPath = settings.get(settingPrefix + "_path", null); if (wordListPath == null) { - String[] explicitWordList = settings.getAsArray(settingPrefix, null); + List explicitWordList = settings.getAsList(settingPrefix, null); if (explicitWordList == null) { return null; } else { - return Arrays.asList(explicitWordList); + return explicitWordList; } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java index e9654719bdc11..4ba078051640a 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java @@ -58,8 +58,8 @@ public void build(final Map tokenizers, final Map charFiltersList = new ArrayList<>(charFilterNames.length); + List charFilterNames = analyzerSettings.getAsList("char_filter"); + List charFiltersList = new ArrayList<>(charFilterNames.size()); for (String charFilterName : charFilterNames) { CharFilterFactory charFilter = charFilters.get(charFilterName); if (charFilter == null) { @@ -74,8 +74,8 @@ public void build(final Map tokenizers, final Map tokenFilterList = new ArrayList<>(tokenFilterNames.length); + List tokenFilterNames = analyzerSettings.getAsList("filter"); + List tokenFilterList = new ArrayList<>(tokenFilterNames.size()); for (String tokenFilterName : tokenFilterNames) { TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName); if (tokenFilter == null) { diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java index a375c1e8e3b9d..a0a7859d50cfd 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java @@ -50,8 +50,8 @@ public void build(final TokenizerFactory keywordTokenizerFactory, final Map charFiltersList = new ArrayList<>(charFilterNames.length); + List charFilterNames = analyzerSettings.getAsList("char_filter"); + List charFiltersList = new ArrayList<>(charFilterNames.size()); for (String charFilterName : charFilterNames) { CharFilterFactory charFilter = charFilters.get(charFilterName); if (charFilter == null) { @@ -66,8 +66,8 @@ public void build(final TokenizerFactory keywordTokenizerFactory, final Map tokenFilterList = new ArrayList<>(tokenFilterNames.length); + List tokenFilterNames = analyzerSettings.getAsList("filter"); + List tokenFilterList = new ArrayList<>(tokenFilterNames.size()); for (String tokenFilterName : tokenFilterNames) { TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName); if (tokenFilter == null) { diff --git a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java 
b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java index cb696219f4ed6..8210883b2f8f5 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java @@ -41,7 +41,7 @@ public EdgeNGramTokenizerFactory(IndexSettings indexSettings, Environment enviro super(indexSettings, name, settings); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); - this.matcher = parseTokenChars(settings.getAsArray("token_chars")); + this.matcher = parseTokenChars(settings.getAsList("token_chars")); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java index 17acddf55e1a3..2a31f1eb26a3e 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java @@ -28,6 +28,7 @@ import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; @@ -65,8 +66,8 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory { MATCHERS = unmodifiableMap(matchers); } - static CharMatcher parseTokenChars(String[] characterClasses) { - if (characterClasses == null || characterClasses.length == 0) { + static CharMatcher parseTokenChars(List characterClasses) { + if (characterClasses == null || characterClasses.isEmpty()) { return null; } CharMatcher.Builder builder = new CharMatcher.Builder(); @@ -85,7 +86,7 @@ public NGramTokenizerFactory(IndexSettings indexSettings, Environment environmen super(indexSettings, name, settings); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); - this.matcher = parseTokenChars(settings.getAsArray("token_chars")); + this.matcher = parseTokenChars(settings.getAsList("token_chars")); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java index f61d51258b01b..bf9045c5d00e1 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java @@ -62,7 +62,7 @@ public TokenStream create(TokenStream tokenStream) { protected Reader getRulesFromSettings(Environment env) { Reader rulesReader; - if (settings.getAsArray("synonyms", null) != null) { + if (settings.getAsList("synonyms", null) != null) { List rulesList = Analysis.getWordList(env, settings, "synonyms"); StringBuilder sb = new StringBuilder(); for (String line : rulesList) { diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index 06e81242af418..fc41770b37766 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -40,7 +40,6 @@ import 
org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; import org.elasticsearch.indices.analysis.AnalysisModuleTests.AppendCharFilter; import org.elasticsearch.plugins.AnalysisPlugin; -import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -73,7 +72,7 @@ public void setUp() throws Exception { .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") .put("index.analysis.analyzer.custom_analyzer.filter", "mock") .put("index.analysis.normalizer.my_normalizer.type", "custom") - .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase").build(); + .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase").build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); environment = new Environment(settings); AnalysisPlugin plugin = new AnalysisPlugin() { diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index 15a2f9e74a461..bd76557f9a86f 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -210,7 +210,7 @@ protected void createIndexBasedOnFieldSettings(String index, String alias, TestF Settings.Builder settings = Settings.builder() .put(indexSettings()) .put("index.analysis.analyzer.tv_test.tokenizer", "standard") - .putArray("index.analysis.analyzer.tv_test.filter", "lowercase"); + .putList("index.analysis.analyzer.tv_test.filter", "lowercase"); assertAcked(prepareCreate(index).addMapping("type1", mappingBuilder).setSettings(settings).addAlias(new Alias(alias))); } diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java index bbd7d5501783c..520c881aa7e62 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java @@ -189,7 +189,7 @@ public void testSimpleTermVectors() throws IOException { .setSettings(Settings.builder() .put(indexSettings()) .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.tv_test.filter", "lowercase"))); + .putList("index.analysis.analyzer.tv_test.filter", "lowercase"))); for (int i = 0; i < 10; i++) { client().prepareIndex("test", "type1", Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog") @@ -261,7 +261,7 @@ public void testRandomSingleTermVectors() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", mapping) .setSettings(Settings.builder() .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.tv_test.filter", "lowercase"))); + .putList("index.analysis.analyzer.tv_test.filter", "lowercase"))); for (int i = 0; i < 10; i++) { client().prepareIndex("test", "type1", Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog") @@ -395,7 +395,7 @@ public void testSimpleTermVectorsWithGenerate() throws IOException { .setSettings(Settings.builder() .put(indexSettings()) 
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.tv_test.filter", "lowercase"))); + .putList("index.analysis.analyzer.tv_test.filter", "lowercase"))); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java index e4d55da9f92b1..5e81949402055 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java @@ -152,7 +152,7 @@ public void testRandomPayloadWithDelimitedPayloadTokenFilter() throws IOExceptio .field("analyzer", "payload_test").endObject().endObject().endObject().endObject(); Settings setting = Settings.builder() .put("index.analysis.analyzer.payload_test.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.payload_test.filter", "my_delimited_payload_filter") + .putList("index.analysis.analyzer.payload_test.filter", "my_delimited_payload_filter") .put("index.analysis.filter.my_delimited_payload_filter.delimiter", delimiter) .put("index.analysis.filter.my_delimited_payload_filter.encoding", encodingString) .put("index.analysis.filter.my_delimited_payload_filter.type", "mock_payload_filter").build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 7b1ac5116f2dd..cdcaf4a1b9c20 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -34,6 +34,8 @@ import org.elasticsearch.test.ESIntegTestCase; import org.junit.After; +import java.util.Arrays; + import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; import static org.hamcrest.Matchers.containsString; @@ -242,11 +244,11 @@ public void testClusterSettingsUpdateResponse() { public void testCanUpdateTracerSettings() { ClusterUpdateSettingsResponse clusterUpdateSettingsResponse = client().admin().cluster() .prepareUpdateSettings() - .setTransientSettings(Settings.builder().putArray("transport.tracer.include", "internal:index/shard/recovery/*", + .setTransientSettings(Settings.builder().putList("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*")) .get(); - assertArrayEquals(clusterUpdateSettingsResponse.getTransientSettings().getAsArray("transport.tracer.include"), new String[] {"internal:index/shard/recovery/*", - "internal:gateway/local*"}); + assertEquals(clusterUpdateSettingsResponse.getTransientSettings().getAsList("transport.tracer.include"), + Arrays.asList("internal:index/shard/recovery/*", "internal:gateway/local*")); } public void testUpdateDiscoveryPublishTimeout() { diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 24f9550a78de6..bd4ac25a8747b 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -179,8 +179,8 @@ public void testAddConsumerAffix() { service.applySettings(Settings.builder() .put("foo.test.bar", 2) .put("foo.test_1.bar", 7) - .putArray("foo.test_list.list", "16", "17") 
- .putArray("foo.test_list_1.list", "18", "19", "20") + .putList("foo.test_list.list", "16", "17") + .putList("foo.test_list_1.list", "18", "19", "20") .build()); assertEquals(2, intResults.get("test").intValue()); assertEquals(7, intResults.get("test_1").intValue()); @@ -195,7 +195,7 @@ public void testAddConsumerAffix() { service.applySettings(Settings.builder() .put("foo.test.bar", 2) .put("foo.test_1.bar", 8) - .putArray("foo.test_list.list", "16", "17") + .putList("foo.test_list.list", "16", "17") .putNull("foo.test_list_1.list") .build()); assertNull("test wasn't changed", intResults.get("test")); @@ -231,8 +231,8 @@ public void testAddConsumerAffixMap() { service.applySettings(Settings.builder() .put("foo.test.bar", 2) .put("foo.test_1.bar", 7) - .putArray("foo.test_list.list", "16", "17") - .putArray("foo.test_list_1.list", "18", "19", "20") + .putList("foo.test_list.list", "16", "17") + .putList("foo.test_list_1.list", "18", "19", "20") .build()); assertEquals(2, intResults.get("test").intValue()); assertEquals(7, intResults.get("test_1").intValue()); @@ -247,7 +247,7 @@ public void testAddConsumerAffixMap() { service.applySettings(Settings.builder() .put("foo.test.bar", 2) .put("foo.test_1.bar", 8) - .putArray("foo.test_list.list", "16", "17") + .putList("foo.test_list.list", "16", "17") .putNull("foo.test_list_1.list") .build()); assertNull("test wasn't changed", intResults.get("test")); @@ -470,14 +470,14 @@ public void testDiff() throws IOException { Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY); assertEquals(2, diff.size()); assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1)); - assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"}); + assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c")); diff = settings.diff( Settings.builder().put("foo.bar", 5).build(), - Settings.builder().put("foo.bar.baz", 17).putArray("foo.bar.quux", "d", "e", "f").build()); + Settings.builder().put("foo.bar.baz", 17).putList("foo.bar.quux", "d", "e", "f").build()); assertEquals(2, diff.size()); assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(17)); - assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"d", "e", "f"}); + assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("d", "e", "f")); diff = settings.diff( Settings.builder().put("some.group.foo", 5).build(), @@ -485,7 +485,7 @@ public void testDiff() throws IOException { assertEquals(4, diff.size()); assertThat(diff.getAsInt("some.group.foobar", null), equalTo(17)); assertNull(diff.get("some.group.foo")); - assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"}); + assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c")); assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1)); assertThat(diff.getAsInt("foo.bar", null), equalTo(1)); @@ -495,7 +495,7 @@ public void testDiff() throws IOException { assertEquals(4, diff.size()); assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17)); assertNull(diff.get("some.prefix.foo.somekey")); - assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"}); + assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c")); assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1)); assertThat(diff.getAsInt("foo.bar", null), equalTo(1)); } @@ -513,14 +513,14 @@ public void testDiffWithAffixAndComplexMatcher() { Settings diff = 
settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY); assertEquals(1, diff.size()); assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1)); - assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys + assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys diff = settings.diff( Settings.builder().put("foo.bar", 5).build(), - Settings.builder().put("foo.bar.baz", 17).putArray("foo.bar.quux", "d", "e", "f").build()); + Settings.builder().put("foo.bar.baz", 17).putList("foo.bar.quux", "d", "e", "f").build()); assertEquals(2, diff.size()); assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(17)); - assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"d", "e", "f"}); + assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("d", "e", "f")); diff = settings.diff( Settings.builder().put("some.group.foo", 5).build(), @@ -528,7 +528,7 @@ public void testDiffWithAffixAndComplexMatcher() { assertEquals(3, diff.size()); assertThat(diff.getAsInt("some.group.foobar", null), equalTo(17)); assertNull(diff.get("some.group.foo")); - assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys + assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1)); assertThat(diff.getAsInt("foo.bar", null), equalTo(1)); @@ -538,21 +538,21 @@ public void testDiffWithAffixAndComplexMatcher() { assertEquals(3, diff.size()); assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17)); assertNull(diff.get("some.prefix.foo.somekey")); - assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys + assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1)); assertThat(diff.getAsInt("foo.bar", null), equalTo(1)); diff = settings.diff( Settings.builder().put("some.prefix.foo.somekey", 5).build(), Settings.builder().put("some.prefix.foobar.somekey", 17).put("some.prefix.foo.somekey", 18) - .putArray("foo.bar.quux", "x", "y", "z") - .putArray("foo.baz.quux", "d", "e", "f") + .putList("foo.bar.quux", "x", "y", "z") + .putList("foo.baz.quux", "d", "e", "f") .build()); assertEquals(5, diff.size()); assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17)); assertNull(diff.get("some.prefix.foo.somekey")); - assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"x", "y", "z"}); - assertArrayEquals(diff.getAsArray("foo.baz.quux", null), new String[] {"d", "e", "f"}); + assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("x", "y", "z")); + assertEquals(diff.getAsList("foo.baz.quux", null), Arrays.asList("d", "e", "f")); assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1)); assertThat(diff.getAsInt("foo.bar", null), equalTo(1)); } @@ -562,7 +562,7 @@ public void testUpdateTracer() { AtomicReference> ref = new AtomicReference<>(); settings.addSettingsUpdateConsumer(TransportService.TRACE_LOG_INCLUDE_SETTING, ref::set); settings.applySettings(Settings.builder() - .putArray("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*").build()); + .putList("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*").build()); assertNotNull(ref.get().size()); assertEquals(ref.get().size(), 2); 
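The tracer assertions above exercise the standard pattern for a dynamic list-valued setting: define it with `Setting.listSetting`, register an update consumer, and apply new settings built with `putList`. A minimal sketch of that flow outside the test harness follows; the setting key and scope here are illustrative, not part of the patch.

-------------------------------------------------
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;

import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;

public final class DynamicListSettingExample {

    // A dynamic, node-scoped list setting; the key is made up for this sketch.
    static final Setting<List<String>> TRACE_INCLUDE =
        Setting.listSetting("example.tracer.include", Collections.emptyList(),
            Function.identity(), Property.Dynamic, Property.NodeScope);

    public static void main(String[] args) {
        ClusterSettings clusterSettings =
            new ClusterSettings(Settings.EMPTY, Collections.<Setting<?>>singleton(TRACE_INCLUDE));

        AtomicReference<List<String>> current = new AtomicReference<>(Collections.emptyList());
        clusterSettings.addSettingsUpdateConsumer(TRACE_INCLUDE, current::set);

        // Dynamic updates hand the consumer the parsed, unmodifiable list.
        clusterSettings.applySettings(Settings.builder()
            .putList("example.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*")
            .build());

        System.out.println(current.get()); // [internal:index/shard/recovery/*, internal:gateway/local*]
    }
}
-------------------------------------------------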
assertTrue(ref.get().contains("internal:index/shard/recovery/*")); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 4dfedf519bd16..65d51e126c9f6 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -441,7 +441,7 @@ public void testListSettings() { assertEquals("foo,bar", value.get(0)); List input = Arrays.asList("test", "test1, test2", "test", ",,,,"); - Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0])); + Settings.Builder builder = Settings.builder().putList("foo.bar", input.toArray(new String[0])); assertTrue(listSetting.exists(builder.build())); value = listSetting.get(builder.build()); assertEquals(input.size(), value.size()); @@ -464,11 +464,11 @@ public void testListSettings() { assertEquals(input.size(), ref.get().size()); assertArrayEquals(ref.get().toArray(new String[0]), input.toArray(new String[0])); - settingUpdater.apply(Settings.builder().putArray("foo.bar", "123").build(), builder.build()); + settingUpdater.apply(Settings.builder().putList("foo.bar", "123").build(), builder.build()); assertEquals(1, ref.get().size()); assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"123"}); - settingUpdater.apply(Settings.builder().put("foo.bar", "1,2,3").build(), Settings.builder().putArray("foo.bar", "123").build()); + settingUpdater.apply(Settings.builder().put("foo.bar", "1,2,3").build(), Settings.builder().putList("foo.bar", "123").build()); assertEquals(3, ref.get().size()); assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"1", "2", "3"}); @@ -492,17 +492,17 @@ public void testListSettings() { assertEquals(1, value.size()); assertEquals("foo,bar", value.get(0)); - value = settingWithFallback.get(Settings.builder().putArray("foo.bar", "1", "2").build()); + value = settingWithFallback.get(Settings.builder().putList("foo.bar", "1", "2").build()); assertEquals(2, value.size()); assertEquals("1", value.get(0)); assertEquals("2", value.get(1)); - value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").build()); + value = settingWithFallback.get(Settings.builder().putList("foo.baz", "3", "4").build()); assertEquals(2, value.size()); assertEquals("3", value.get(0)); assertEquals("4", value.get(1)); - value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").putArray("foo.bar", "1", "2").build()); + value = settingWithFallback.get(Settings.builder().putList("foo.baz", "3", "4").putList("foo.bar", "1", "2").build()); assertEquals(2, value.size()); assertEquals("3", value.get(0)); assertEquals("4", value.get(1)); @@ -512,7 +512,7 @@ public void testListSettingAcceptsNumberSyntax() { Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(), Property.Dynamic, Property.NodeScope); List input = Arrays.asList("test", "test1, test2", "test", ",,,,"); - Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0])); + Settings.Builder builder = Settings.builder().putList("foo.bar", input.toArray(new String[0])); // try to parse this really annoying format for (String key : builder.keys()) { assertTrue("key: " + key + " doesn't match", listSetting.match(key)); @@ -601,11 +601,11 @@ public void testGetAllConcreteSettings() { (key) -> Setting.listSetting(key, Collections.emptyList(), 
Function.identity(), Property.NodeScope)); Settings settings = Settings.builder() - .putArray("foo.1.bar", "1", "2") - .putArray("foo.2.bar", "3", "4", "5") - .putArray("foo.bar", "6") - .putArray("some.other", "6") - .putArray("foo.3.bar", "6") + .putList("foo.1.bar", "1", "2") + .putList("foo.2.bar", "3", "4", "5") + .putList("foo.bar", "6") + .putList("some.other", "6") + .putList("foo.3.bar", "6") .build(); Stream>> allConcreteSettings = listAffixSetting.getAllConcreteSettings(settings); Map> collect = allConcreteSettings.collect(Collectors.toMap(Setting::getKey, (s) -> s.get(settings))); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index 4a9e55c324653..42cb0f1e3e7e3 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -35,11 +35,10 @@ import java.io.ByteArrayInputStream; import java.io.IOException; -import java.io.StringBufferInputStream; -import java.io.StringReader; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; @@ -47,7 +46,7 @@ import java.util.NoSuchElementException; import java.util.Set; -import static org.hamcrest.Matchers.arrayContaining; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -165,99 +164,99 @@ public void testNames() { public void testThatArraysAreOverriddenCorrectly() throws IOException { // overriding a single value with an array Settings settings = Settings.builder() - .put(Settings.builder().putArray("value", "1").build()) - .put(Settings.builder().putArray("value", "2", "3").build()) + .put(Settings.builder().putList("value", "1").build()) + .put(Settings.builder().putList("value", "2", "3").build()) .build(); - assertThat(settings.getAsArray("value"), arrayContaining("2", "3")); + assertThat(settings.getAsList("value"), contains("2", "3")); settings = Settings.builder() .put(Settings.builder().put("value", "1").build()) - .put(Settings.builder().putArray("value", "2", "3").build()) + .put(Settings.builder().putList("value", "2", "3").build()) .build(); - assertThat(settings.getAsArray("value"), arrayContaining("2", "3")); + assertThat(settings.getAsList("value"), contains("2", "3")); settings = Settings.builder().loadFromSource("value: 1", XContentType.YAML) .loadFromSource("value: [ 2, 3 ]", XContentType.YAML) .build(); - assertThat(settings.getAsArray("value"), arrayContaining("2", "3")); + assertThat(settings.getAsList("value"), contains("2", "3")); settings = Settings.builder() .put(Settings.builder().put("value.with.deep.key", "1").build()) - .put(Settings.builder().putArray("value.with.deep.key", "2", "3").build()) + .put(Settings.builder().putList("value.with.deep.key", "2", "3").build()) .build(); - assertThat(settings.getAsArray("value.with.deep.key"), arrayContaining("2", "3")); + assertThat(settings.getAsList("value.with.deep.key"), contains("2", "3")); // overriding an array with a shorter array settings = Settings.builder() - .put(Settings.builder().putArray("value", "1", "2").build()) - .put(Settings.builder().putArray("value", "3").build()) + .put(Settings.builder().putList("value", "1", "2").build()) + 
.put(Settings.builder().putList("value", "3").build()) .build(); - assertThat(settings.getAsArray("value"), arrayContaining("3")); + assertThat(settings.getAsList("value"), contains("3")); settings = Settings.builder() - .put(Settings.builder().putArray("value", "1", "2", "3").build()) - .put(Settings.builder().putArray("value", "4", "5").build()) + .put(Settings.builder().putList("value", "1", "2", "3").build()) + .put(Settings.builder().putList("value", "4", "5").build()) .build(); - assertThat(settings.getAsArray("value"), arrayContaining("4", "5")); + assertThat(settings.getAsList("value"), contains("4", "5")); settings = Settings.builder() - .put(Settings.builder().putArray("value.deep.key", "1", "2", "3").build()) - .put(Settings.builder().putArray("value.deep.key", "4", "5").build()) + .put(Settings.builder().putList("value.deep.key", "1", "2", "3").build()) + .put(Settings.builder().putList("value.deep.key", "4", "5").build()) .build(); - assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5")); + assertThat(settings.getAsList("value.deep.key"), contains("4", "5")); // overriding an array with a longer array settings = Settings.builder() - .put(Settings.builder().putArray("value", "1", "2").build()) - .put(Settings.builder().putArray("value", "3", "4", "5").build()) + .put(Settings.builder().putList("value", "1", "2").build()) + .put(Settings.builder().putList("value", "3", "4", "5").build()) .build(); - assertThat(settings.getAsArray("value"), arrayContaining("3", "4", "5")); + assertThat(settings.getAsList("value"), contains("3", "4", "5")); settings = Settings.builder() - .put(Settings.builder().putArray("value.deep.key", "1", "2", "3").build()) - .put(Settings.builder().putArray("value.deep.key", "4", "5").build()) + .put(Settings.builder().putList("value.deep.key", "1", "2", "3").build()) + .put(Settings.builder().putList("value.deep.key", "4", "5").build()) .build(); - assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5")); + assertThat(settings.getAsList("value.deep.key"), contains("4", "5")); // overriding an array with a single value settings = Settings.builder() - .put(Settings.builder().putArray("value", "1", "2").build()) + .put(Settings.builder().putList("value", "1", "2").build()) .put(Settings.builder().put("value", "3").build()) .build(); - assertThat(settings.getAsArray("value"), arrayContaining("3")); + assertThat(settings.getAsList("value"), contains("3")); settings = Settings.builder() - .put(Settings.builder().putArray("value.deep.key", "1", "2").build()) + .put(Settings.builder().putList("value.deep.key", "1", "2").build()) .put(Settings.builder().put("value.deep.key", "3").build()) .build(); - assertThat(settings.getAsArray("value.deep.key"), arrayContaining("3")); + assertThat(settings.getAsList("value.deep.key"), contains("3")); // test that other arrays are not overridden settings = Settings.builder() - .put(Settings.builder().putArray("value", "1", "2", "3").putArray("a", "b", "c").build()) - .put(Settings.builder().putArray("value", "4", "5").putArray("d", "e", "f").build()) + .put(Settings.builder().putList("value", "1", "2", "3").putList("a", "b", "c").build()) + .put(Settings.builder().putList("value", "4", "5").putList("d", "e", "f").build()) .build(); - assertThat(settings.getAsArray("value"), arrayContaining("4", "5")); - assertThat(settings.getAsArray("a"), arrayContaining("b", "c")); - assertThat(settings.getAsArray("d"), arrayContaining("e", "f")); + assertThat(settings.getAsList("value"), contains("4", 
"5")); + assertThat(settings.getAsList("a"), contains("b", "c")); + assertThat(settings.getAsList("d"), contains("e", "f")); settings = Settings.builder() - .put(Settings.builder().putArray("value.deep.key", "1", "2", "3").putArray("a", "b", "c").build()) - .put(Settings.builder().putArray("value.deep.key", "4", "5").putArray("d", "e", "f").build()) + .put(Settings.builder().putList("value.deep.key", "1", "2", "3").putList("a", "b", "c").build()) + .put(Settings.builder().putList("value.deep.key", "4", "5").putList("d", "e", "f").build()) .build(); - assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5")); - assertThat(settings.getAsArray("a"), notNullValue()); - assertThat(settings.getAsArray("d"), notNullValue()); + assertThat(settings.getAsList("value.deep.key"), contains("4", "5")); + assertThat(settings.getAsList("a"), notNullValue()); + assertThat(settings.getAsList("d"), notNullValue()); // overriding a deeper structure with an array settings = Settings.builder() .put(Settings.builder().put("value.data", "1").build()) - .put(Settings.builder().putArray("value", "4", "5").build()) + .put(Settings.builder().putList("value", "4", "5").build()) .build(); - assertThat(settings.getAsArray("value"), arrayContaining("4", "5")); + assertThat(settings.getAsList("value"), contains("4", "5")); // overriding an array with a deeper structure settings = Settings.builder() - .put(Settings.builder().putArray("value", "4", "5").build()) + .put(Settings.builder().putList("value", "4", "5").build()) .put(Settings.builder().put("value.data", "1").build()) .build(); assertThat(settings.get("value.data"), is("1")); @@ -477,7 +476,7 @@ public void testWriteSettingsToStream() throws IOException { Settings.Builder builder = Settings.builder(); builder.put("test.key1.baz", "blah1"); builder.putNull("test.key3.bar"); - builder.putArray("test.key4.foo", "1", "2"); + builder.putList("test.key4.foo", "1", "2"); builder.setSecureSettings(secureSettings); assertEquals(7, builder.build().size()); Settings.writeSettingsToStream(builder.build(), out); @@ -487,7 +486,7 @@ public void testWriteSettingsToStream() throws IOException { assertEquals("blah1", settings.get("test.key1.baz")); assertNull(settings.get("test.key3.bar")); assertTrue(settings.keySet().contains("test.key3.bar")); - assertArrayEquals(new String[] {"1", "2"}, settings.getAsArray("test.key4.foo")); + assertEquals(Arrays.asList("1", "2"), settings.getAsList("test.key4.foo")); } public void testSecureSettingConflict() { @@ -508,7 +507,7 @@ public void testGetAsArrayFailsOnDuplicates() { public void testToAndFromXContent() throws IOException { Settings settings = Settings.builder() - .putArray("foo.bar.baz", "1", "2", "3") + .putList("foo.bar.baz", "1", "2", "3") .put("foo.foobar", 2) .put("rootfoo", "test") .put("foo.baz", "1,2,3,4") @@ -522,7 +521,7 @@ public void testToAndFromXContent() throws IOException { XContentParser parser = createParser(builder); Settings build = Settings.fromXContent(parser); assertEquals(5, build.size()); - assertArrayEquals(new String[] {"1", "2", "3"}, build.getAsArray("foo.bar.baz")); + assertEquals(Arrays.asList("1", "2", "3"), build.getAsList("foo.bar.baz")); assertEquals(2, build.getAsInt("foo.foobar", 0).intValue()); assertEquals("test", build.get("rootfoo")); assertEquals("1,2,3,4", build.get("foo.baz")); @@ -542,9 +541,9 @@ public void testSimpleJsonSettings() throws Exception { // check array assertNull(settings.get("test1.test3.0")); assertNull(settings.get("test1.test3.1")); - 
assertThat(settings.getAsArray("test1.test3").length, equalTo(2)); - assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1")); - assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2")); + assertThat(settings.getAsList("test1.test3").size(), equalTo(2)); + assertThat(settings.getAsList("test1.test3").get(0), equalTo("test3-1")); + assertThat(settings.getAsList("test1.test3").get(1), equalTo("test3-2")); } public void testDuplicateKeysThrowsException() { @@ -575,14 +574,14 @@ public void testDuplicateKeysThrowsException() { public void testToXContent() throws IOException { // this is just terrible but it's the existing behavior! - Settings test = Settings.builder().putArray("foo.bar", "1", "2", "3").put("foo.bar.baz", "test").build(); + Settings test = Settings.builder().putList("foo.bar", "1", "2", "3").put("foo.bar.baz", "test").build(); XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); builder.startObject(); test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap())); builder.endObject(); assertEquals("{\"foo\":{\"bar.baz\":\"test\",\"bar\":[\"1\",\"2\",\"3\"]}}", builder.string()); - test = Settings.builder().putArray("foo.bar", "1", "2", "3").build(); + test = Settings.builder().putList("foo.bar", "1", "2", "3").build(); builder = XContentBuilder.builder(XContentType.JSON.xContent()); builder.startObject(); test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap())); @@ -615,18 +614,18 @@ public void testSimpleYamlSettings() throws Exception { // check array assertNull(settings.get("test1.test3.0")); assertNull(settings.get("test1.test3.1")); - assertThat(settings.getAsArray("test1.test3").length, equalTo(2)); - assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1")); - assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2")); + assertThat(settings.getAsList("test1.test3").size(), equalTo(2)); + assertThat(settings.getAsList("test1.test3").get(0), equalTo("test3-1")); + assertThat(settings.getAsList("test1.test3").get(1), equalTo("test3-2")); } public void testYamlLegacyList() throws IOException { Settings settings = Settings.builder() .loadFromStream("foo.yml", new ByteArrayInputStream("foo.bar.baz.0: 1\nfoo.bar.baz.1: 2".getBytes(StandardCharsets.UTF_8)), false).build(); - assertThat(settings.getAsArray("foo.bar.baz").length, equalTo(2)); - assertThat(settings.getAsArray("foo.bar.baz")[0], equalTo("1")); - assertThat(settings.getAsArray("foo.bar.baz")[1], equalTo("2")); + assertThat(settings.getAsList("foo.bar.baz").size(), equalTo(2)); + assertThat(settings.getAsList("foo.bar.baz").get(0), equalTo("1")); + assertThat(settings.getAsList("foo.bar.baz").get(1), equalTo("2")); } public void testIndentation() throws Exception { @@ -675,14 +674,14 @@ public void testReadLegacyFromStream() throws IOException { in.setVersion(VersionUtils.getPreviousVersion(Version.V_6_1_0)); Settings settings = Settings.readSettingsFromStream(in); assertEquals(2, settings.size()); - assertArrayEquals(new String[]{"0", "1", "2", "3"}, settings.getAsArray("foo.bar")); + assertEquals(Arrays.asList("0", "1", "2", "3"), settings.getAsList("foo.bar")); assertEquals("baz", settings.get("foo.bar.baz")); } public void testWriteLegacyOutput() throws IOException { BytesStreamOutput output = new BytesStreamOutput(); output.setVersion(VersionUtils.getPreviousVersion(Version.V_6_1_0)); - Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3") + Settings settings = 
Settings.builder().putList("foo.bar", "0", "1", "2", "3") .put("foo.bar.baz", "baz").putNull("foo.null").build(); Settings.writeSettingsToStream(settings, output); StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes())); @@ -703,7 +702,7 @@ public void testWriteLegacyOutput() throws IOException { in.setVersion(output.getVersion()); Settings readSettings = Settings.readSettingsFromStream(in); assertEquals(3, readSettings.size()); - assertArrayEquals(new String[] {"0", "1", "2", "3"}, readSettings.getAsArray("foo.bar")); + assertEquals(Arrays.asList("0", "1", "2", "3"), readSettings.getAsList("foo.bar")); assertEquals(readSettings.get("foo.bar.baz"), "baz"); assertTrue(readSettings.keySet().contains("foo.null")); assertNull(readSettings.get("foo.null")); @@ -712,18 +711,18 @@ public void testWriteLegacyOutput() throws IOException { public void testReadWriteArray() throws IOException { BytesStreamOutput output = new BytesStreamOutput(); output.setVersion(randomFrom(Version.CURRENT, Version.V_6_1_0)); - Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").build(); + Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").build(); Settings.writeSettingsToStream(settings, output); StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes())); Settings build = Settings.readSettingsFromStream(in); assertEquals(2, build.size()); - assertArrayEquals(build.getAsArray("foo.bar"), new String[] {"0", "1", "2", "3"}); + assertEquals(build.getAsList("foo.bar"), Arrays.asList("0", "1", "2", "3")); assertEquals(build.get("foo.bar.baz"), "baz"); } public void testCopy() { - Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").putNull("test").build(); - assertArrayEquals(new String[] {"0", "1", "2", "3"}, Settings.builder().copy("foo.bar", settings).build().getAsArray("foo.bar")); + Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").putNull("test").build(); + assertEquals(Arrays.asList("0", "1", "2", "3"), Settings.builder().copy("foo.bar", settings).build().getAsList("foo.bar")); assertEquals("baz", Settings.builder().copy("foo.bar.baz", settings).build().get("foo.bar.baz")); assertNull(Settings.builder().copy("foo.bar.baz", settings).build().get("test")); assertTrue(Settings.builder().copy("test", settings).build().keySet().contains("test")); diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index ed7cdd4d4243d..1a837b825d867 100644 --- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -59,7 +59,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; -import static java.util.Collections.singleton; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -137,7 +136,7 @@ protected MockTransportService build(Settings settings, Version version) { Settings.builder() .put(settings) // trace zenfd actions but keep the default otherwise - .putArray(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), TransportLivenessAction.NAME) + .putList(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), TransportLivenessAction.NAME) .build(), new MockTcpTransport(settings, threadPool, 
BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService, namedWriteableRegistry, new NetworkService(Collections.emptyList()), version), diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index 0492bc82e5f73..3c7a49a176635 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -179,7 +179,7 @@ public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfil final ClusterState stateMismatch = ClusterState.builder(new ClusterName("mismatch")).version(randomNonNegativeLong()).build(); Settings hostsSettings = Settings.builder() - .putArray("discovery.zen.ping.unicast.hosts", + .putList("discovery.zen.ping.unicast.hosts", NetworkAddress.format(new InetSocketAddress(handleA.address.address().getAddress(), handleA.address.address().getPort())), NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort())), NetworkAddress.format(new InetSocketAddress(handleC.address.address().getAddress(), handleC.address.address().getPort())), @@ -305,7 +305,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi new InetSocketAddress(handleC.address.address().getAddress(), handleC.address.address().getPort()))}); final Settings hostsSettings = Settings.builder() - .putArray("discovery.zen.ping.unicast.hosts", "UZP_A", "UZP_B", "UZP_C") + .putList("discovery.zen.ping.unicast.hosts", "UZP_A", "UZP_B", "UZP_C") .put("cluster.name", "test") .build(); @@ -589,7 +589,7 @@ public void testResolveReuseExistingNodeConnections() throws ExecutionException, final boolean useHosts = randomBoolean(); final Settings.Builder hostsSettingsBuilder = Settings.builder().put("cluster.name", "test"); if (useHosts) { - hostsSettingsBuilder.putArray("discovery.zen.ping.unicast.hosts", + hostsSettingsBuilder.putList("discovery.zen.ping.unicast.hosts", NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort())) ); } else { diff --git a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java index 51391a8643b48..70df7d33f291c 100644 --- a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java @@ -42,7 +42,7 @@ public Environment newEnvironment(Settings settings) throws IOException { Settings build = Settings.builder() .put(settings) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) - .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); + .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); return new Environment(build); } @@ -50,7 +50,7 @@ public void testRepositoryResolution() throws IOException { Environment environment = newEnvironment(); assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue()); assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue()); - environment = newEnvironment(Settings.builder().putArray(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build()); + environment = newEnvironment(Settings.builder().putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build()); 
assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue()); assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue()); assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 42cb4a5811b2e..615a75dda025a 100644 --- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -483,7 +483,7 @@ public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException public Settings buildEnvSettings(Settings settings) { return Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) - .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()) + .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()) .put(settings).build(); } @@ -491,7 +491,7 @@ public NodeEnvironment newNodeEnvironment(String[] dataPaths, Settings settings) Settings build = Settings.builder() .put(settings) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) - .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build(); + .putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build(); return new NodeEnvironment(build, new Environment(build)); } @@ -500,7 +500,7 @@ public NodeEnvironment newNodeEnvironment(String[] dataPaths, String sharedDataP .put(settings) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataPath) - .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build(); + .putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build(); return new NodeEnvironment(build, new Environment(build)); } } diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 8d8878fa28251..6be786aff88b5 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -543,7 +543,7 @@ public void testQueryDefaultField() { ); assertThat(index.getDefaultFields(), equalTo(Collections.singletonList("body"))); index.updateIndexMetaData( - newIndexMeta("index", Settings.builder().putArray("index.query.default_field", "body", "title").build()) + newIndexMeta("index", Settings.builder().putList("index.query.default_field", "body", "title").build()) ); assertThat(index.getDefaultFields(), equalTo(Arrays.asList("body", "title"))); } diff --git a/core/src/test/java/org/elasticsearch/index/IndexSortIT.java b/core/src/test/java/org/elasticsearch/index/IndexSortIT.java index bb59bc948805c..c981d88a3d1a8 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSortIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSortIT.java @@ -26,8 +26,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESIntegTestCase; -import org.junit.AfterClass; -import org.junit.BeforeClass; import java.io.IOException; @@ -80,7 +78,7 @@ public void testIndexSort() { .put(indexSettings()) .put("index.number_of_shards", "1") .put("index.number_of_replicas", "1") - .putArray("index.sort.field", "date", "numeric_dv", "keyword_dv") + .putList("index.sort.field", "date", 
"numeric_dv", "keyword_dv") ) .addMapping("test", TEST_MAPPING) .get(); @@ -99,7 +97,7 @@ public void testInvalidIndexSort() { () -> prepareCreate("test") .setSettings(Settings.builder() .put(indexSettings()) - .putArray("index.sort.field", "invalid_field") + .putList("index.sort.field", "invalid_field") ) .addMapping("test", TEST_MAPPING) .get() @@ -110,7 +108,7 @@ public void testInvalidIndexSort() { () -> prepareCreate("test") .setSettings(Settings.builder() .put(indexSettings()) - .putArray("index.sort.field", "numeric") + .putList("index.sort.field", "numeric") ) .addMapping("test", TEST_MAPPING) .get() @@ -121,7 +119,7 @@ public void testInvalidIndexSort() { () -> prepareCreate("test") .setSettings(Settings.builder() .put(indexSettings()) - .putArray("index.sort.field", "keyword") + .putList("index.sort.field", "keyword") ) .addMapping("test", TEST_MAPPING) .get() diff --git a/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java index 74ec1cc02d93f..78569d927be76 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java @@ -76,9 +76,9 @@ public void testSimpleIndexSort() throws IOException { public void testIndexSortWithArrays() throws IOException { Settings settings = Settings.builder() - .putArray("index.sort.field", "field1", "field2") - .putArray("index.sort.order", "asc", "desc") - .putArray("index.sort.missing", "_last", "_first") + .putList("index.sort.field", "field1", "field2") + .putList("index.sort.order", "asc", "desc") + .putList("index.sort.missing", "_last", "_first") .build(); IndexSettings indexSettings = indexSettings(settings); IndexSortConfig config = indexSettings.getIndexSortConfig(); @@ -108,7 +108,7 @@ public void testInvalidIndexSort() throws IOException { public void testInvalidIndexSortWithArray() throws IOException { final Settings settings = Settings.builder() .put("index.sort.field", "field1") - .putArray("index.sort.order", new String[] {"asc", "desc"}) + .putList("index.sort.order", new String[] {"asc", "desc"}) .build(); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexSettings(settings)); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java index 9303159c265b9..d93533ffc80d3 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java @@ -129,9 +129,9 @@ public void testConfigureCamelCaseTokenFilter() throws IOException { .put("index.analysis.filter.testFilter.type", "mock") .put("index.analysis.filter.test_filter.type", "mock") .put("index.analysis.analyzer.custom_analyzer_with_camel_case.tokenizer", "standard") - .putArray("index.analysis.analyzer.custom_analyzer_with_camel_case.filter", "lowercase", "testFilter") + .putList("index.analysis.analyzer.custom_analyzer_with_camel_case.filter", "lowercase", "testFilter") .put("index.analysis.analyzer.custom_analyzer_with_snake_case.tokenizer", "standard") - .putArray("index.analysis.analyzer.custom_analyzer_with_snake_case.filter", "lowercase", "test_filter").build(); + .putList("index.analysis.analyzer.custom_analyzer_with_snake_case.filter", "lowercase", "test_filter").build(); IndexSettings idxSettings = 
IndexSettingsModule.newIndexSettings("index", indexSettings); @@ -209,8 +209,8 @@ public void testNoTypeOrTokenizerErrorMessage() throws IOException { .builder() .put(IndexMetaData.SETTING_VERSION_CREATED, version) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .putArray("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"}) - .putArray("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"}) + .putList("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"}) + .putList("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"}) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java index 4073bbdbbc9c7..e07b4e5b9d435 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.charset.CharacterCodingException; -import java.nio.charset.Charset; import java.nio.charset.MalformedInputException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -50,7 +49,7 @@ public void testParseStemExclusion() { assertThat(set.contains("baz"), is(false)); /* Array */ - settings = Settings.builder().putArray("stem_exclusion", "foo","bar").build(); + settings = Settings.builder().putList("stem_exclusion", "foo","bar").build(); set = Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET); assertThat(set.contains("foo"), is(true)); assertThat(set.contains("bar"), is(true)); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java index 66b28ec419a7f..7d8d64e6962d5 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java @@ -42,7 +42,7 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase { public void testBasics() throws IOException { Settings settings = Settings.builder() - .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase") + .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, MOCK_ANALYSIS_PLUGIN); @@ -57,7 +57,7 @@ public void testBasics() throws IOException { public void testUnknownType() { Settings settings = Settings.builder() .put("index.analysis.normalizer.my_normalizer.type", "foobar") - .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase", "asciifolding") + .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase", "asciifolding") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -78,7 +78,7 @@ public void testTokenizer() throws IOException { public void testCharFilters() throws IOException { Settings settings = Settings.builder() .put("index.analysis.char_filter.my_mapping.type", "mock_char_filter") - 
.putArray("index.analysis.normalizer.my_normalizer.char_filter", "my_mapping") + .putList("index.analysis.normalizer.my_normalizer.char_filter", "my_mapping") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, MOCK_ANALYSIS_PLUGIN); @@ -92,7 +92,7 @@ public void testCharFilters() throws IOException { public void testIllegalFilters() throws IOException { Settings settings = Settings.builder() - .putArray("index.analysis.normalizer.my_normalizer.filter", "mock_forbidden") + .putList("index.analysis.normalizer.my_normalizer.filter", "mock_forbidden") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -102,7 +102,7 @@ public void testIllegalFilters() throws IOException { public void testIllegalCharFilters() throws IOException { Settings settings = Settings.builder() - .putArray("index.analysis.normalizer.my_normalizer.char_filter", "mock_forbidden") + .putList("index.analysis.normalizer.my_normalizer.char_filter", "mock_forbidden") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java index 41deb0bd92352..36c9dee10919f 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java @@ -83,11 +83,11 @@ public void testSynonymWordDeleteByAnalyzer() throws IOException { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("path.home", createTempDir().toString()) .put("index.analysis.filter.synonym.type", "synonym") - .putArray("index.analysis.filter.synonym.synonyms", "kimchy => shay", "dude => elasticsearch", "abides => man!") + .putList("index.analysis.filter.synonym.synonyms", "kimchy => shay", "dude => elasticsearch", "abides => man!") .put("index.analysis.filter.stop_within_synonym.type", "stop") - .putArray("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch") + .putList("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch") .put("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym","synonym") + .putList("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym","synonym") .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); try { @@ -104,11 +104,11 @@ public void testExpandSynonymWordDeleteByAnalyzer() throws IOException { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("path.home", createTempDir().toString()) .put("index.analysis.filter.synonym_expand.type", "synonym") - .putArray("index.analysis.filter.synonym_expand.synonyms", "kimchy, shay", "dude, elasticsearch", "abides, man!") + .putList("index.analysis.filter.synonym_expand.synonyms", "kimchy, shay", "dude, elasticsearch", "abides, man!") .put("index.analysis.filter.stop_within_synonym.type", "stop") - .putArray("index.analysis.filter.stop_within_synonym.stopwords", 
"kimchy", "elasticsearch") + .putList("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch") .put("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym","synonym_expand") + .putList("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym","synonym_expand") .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); try { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 3ecef3aa0f514..e67b25b051b4e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -70,9 +70,9 @@ protected Collection> getPlugins() { public void setup() { indexService = createIndex("test", Settings.builder() .put("index.analysis.normalizer.my_lowercase.type", "custom") - .putArray("index.analysis.normalizer.my_lowercase.filter", "lowercase") + .putList("index.analysis.normalizer.my_lowercase.filter", "lowercase") .put("index.analysis.normalizer.my_other_lowercase.type", "custom") - .putArray("index.analysis.normalizer.my_other_lowercase.filter", "mock_other_lowercase").build()); + .putList("index.analysis.normalizer.my_other_lowercase.filter", "mock_other_lowercase").build()); parser = indexService.mapperService().documentMapperParser(); } diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index c29172d88afa8..94b55fba61870 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -993,7 +993,7 @@ public void testDefaultField() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); QueryShardContext context = createShardContext(); context.getIndexSettings().updateIndexMetaData( - newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field", + newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", STRING_FIELD_NAME, STRING_FIELD_NAME_2 + "^5").build()) ); Query query = new QueryStringQueryBuilder("hello") @@ -1008,7 +1008,7 @@ public void testDefaultField() throws Exception { // Reset the default value context.getIndexSettings().updateIndexMetaData( newIndexMeta("index", - context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field", "*").build()) + context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", "*").build()) ); } diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index efacd3c1faba4..bfc6fd0600493 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -575,7 +575,7 @@ public void testDefaultField() throws Exception { assumeTrue("test runs 
only when at least a type is registered", getCurrentTypes().length > 0); QueryShardContext context = createShardContext(); context.getIndexSettings().updateIndexMetaData( - newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field", + newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", STRING_FIELD_NAME, STRING_FIELD_NAME_2 + "^5").build()) ); Query query = new SimpleQueryStringBuilder("hello") @@ -590,7 +590,7 @@ public void testDefaultField() throws Exception { // Reset the default value context.getIndexSettings().updateIndexMetaData( newIndexMeta("index", - context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field", "*").build()) + context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", "*").build()) ); } diff --git a/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java b/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java index ec5e92ef6e376..aa154d9392574 100644 --- a/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java @@ -52,15 +52,15 @@ public void setUp() throws Exception { Settings.builder() .put(indexSettings()) .put("index.analysis.filter.syns.type", "synonym") - .putArray("index.analysis.filter.syns.synonyms", "wtf, what the fudge", "foo, bar baz") + .putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge", "foo, bar baz") .put("index.analysis.analyzer.lower_syns.type", "custom") .put("index.analysis.analyzer.lower_syns.tokenizer", "standard") - .putArray("index.analysis.analyzer.lower_syns.filter", "lowercase", "syns") + .putList("index.analysis.analyzer.lower_syns.filter", "lowercase", "syns") .put("index.analysis.filter.graphsyns.type", "synonym_graph") - .putArray("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz") + .putList("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz") .put("index.analysis.analyzer.lower_graphsyns.type", "custom") .put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard") - .putArray("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns") + .putList("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns") ); assertAcked(builder.addMapping(INDEX, createMapping())); diff --git a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java index 990bfce9db8e2..70010cdfc2224 100644 --- a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java @@ -30,7 +30,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; @@ -47,7 +46,6 @@ import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; public class MultiMatchQueryTests extends ESSingleNodeTestCase { @@ -57,7 +55,7 @@ public class MultiMatchQueryTests 
extends ESSingleNodeTestCase { public void setup() throws IOException { Settings settings = Settings.builder() .put("index.analysis.filter.syns.type","synonym") - .putArray("index.analysis.filter.syns.synonyms","quick,fast") + .putList("index.analysis.filter.syns.synonyms","quick,fast") .put("index.analysis.analyzer.syns.tokenizer","standard") .put("index.analysis.analyzer.syns.filter","syns").build(); IndexService indexService = createIndex("test", settings); diff --git a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java index fc8fc12e75d6a..c3d309b486fa0 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.NodeEnvironment.NodePath; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -168,7 +167,7 @@ public void testSelectNewPathForShard() throws Exception { Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), path) - .putArray(Environment.PATH_DATA_SETTING.getKey(), paths).build(); + .putList(Environment.PATH_DATA_SETTING.getKey(), paths).build(); NodeEnvironment nodeEnv = new NodeEnvironment(settings, new Environment(settings)); // Make sure all our mocking above actually worked: diff --git a/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java b/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java index 0a72037b7d8c0..24ce9b487cc24 100644 --- a/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java @@ -21,9 +21,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.FileSwitchDirectory; import org.apache.lucene.store.MMapDirectory; -import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.store.SleepingLockWrapper; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; @@ -48,7 +46,7 @@ public void testPreload() throws IOException { private void doTestPreload(String...preload) throws IOException { Settings build = Settings.builder() .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "mmapfs") - .putArray(IndexModule.INDEX_STORE_PRE_LOAD_SETTING.getKey(), preload) + .putList(IndexModule.INDEX_STORE_PRE_LOAD_SETTING.getKey(), preload) .build(); IndexSettings settings = IndexSettingsModule.newIndexSettings("foo", build); IndexStore store = new IndexStore(settings); diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index d53dba67e0dc4..9f214082d4b22 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -117,9 +117,9 @@ public void testAnalyzeWithNonDefaultPostionLength() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")) .setSettings(Settings.builder().put(indexSettings()) 
.put("index.analysis.filter.syns.type", "synonym") - .putArray("index.analysis.filter.syns.synonyms", "wtf, what the fudge") + .putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge") .put("index.analysis.analyzer.custom_syns.tokenizer", "standard") - .putArray("index.analysis.analyzer.custom_syns.filter", "lowercase", "syns"))); + .putList("index.analysis.analyzer.custom_syns.filter", "lowercase", "syns"))); ensureGreen(); AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("say what the fudge").setIndex("test").setAnalyzer("custom_syns").get(); @@ -446,7 +446,7 @@ public void testAnalyzeNormalizedKeywordField() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("alias")) .setSettings(Settings.builder().put(indexSettings()) .put("index.analysis.normalizer.my_normalizer.type", "custom") - .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase")) + .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase")) .addMapping("test", "keyword", "type=keyword,normalizer=my_normalizer")); ensureGreen("test"); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 34aa2ab3117aa..faf1f65f34bda 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -1358,9 +1358,9 @@ public void testPhrasePrefix() throws IOException { Builder builder = Settings.builder() .put(indexSettings()) .put("index.analysis.analyzer.synonym.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase") + .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase") .put("index.analysis.filter.synonym.type", "synonym") - .putArray("index.analysis.filter.synonym.synonyms", "quick => fast"); + .putList("index.analysis.filter.synonym.synonyms", "quick => fast"); assertAcked(prepareCreate("first_test_index").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping())); @@ -2773,9 +2773,9 @@ public void testSynonyms() throws IOException { Builder builder = Settings.builder() .put(indexSettings()) .put("index.analysis.analyzer.synonym.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase") + .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase") .put("index.analysis.filter.synonym.type", "synonym") - .putArray("index.analysis.filter.synonym.synonyms", "fast,quick"); + .putList("index.analysis.filter.synonym.synonyms", "fast,quick"); assertAcked(prepareCreate("test").setSettings(builder.build()) .addMapping("type1", "field1", diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index ab23dfbe21928..58565b5f264b7 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -38,9 +38,7 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.rescore.QueryRescoreMode; import org.elasticsearch.search.rescore.QueryRescorerBuilder; -import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; -import 
org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; @@ -159,9 +157,9 @@ public void testRescorePhrase() throws Exception { public void testMoreDocs() throws Exception { Builder builder = Settings.builder(); builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace"); - builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); + builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); builder.put("index.analysis.filter.synonym.type", "synonym"); - builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); + builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym") @@ -237,9 +235,9 @@ public void testMoreDocs() throws Exception { public void testSmallRescoreWindow() throws Exception { Builder builder = Settings.builder(); builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace"); - builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); + builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); builder.put("index.analysis.filter.synonym.type", "synonym"); - builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); + builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym") @@ -309,9 +307,9 @@ public void testSmallRescoreWindow() throws Exception { public void testRescorerMadeScoresWorse() throws Exception { Builder builder = Settings.builder(); builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace"); - builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); + builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); builder.put("index.analysis.filter.synonym.type", "synonym"); - builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); + builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym") diff --git a/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java index ac72fc6fcb96d..ab8bcb539d6ae 100644 --- a/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.query; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -52,7 +51,6 @@ import static 
org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -266,10 +264,10 @@ private void setupIndexWithGraph(String index) throws Exception { Settings.builder() .put(indexSettings()) .put("index.analysis.filter.graphsyns.type", "synonym_graph") - .putArray("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz") + .putList("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz") .put("index.analysis.analyzer.lower_graphsyns.type", "custom") .put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard") - .putArray("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns") + .putList("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns") ); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject(index).startObject("properties") diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 9f7aaff4489d8..3ad7a83ef19db 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -349,7 +349,7 @@ public void testCommonTermsQueryStackedTokens() throws Exception { .put(indexSettings()) .put(SETTING_NUMBER_OF_SHARDS,1) .put("index.analysis.filter.syns.type","synonym") - .putArray("index.analysis.filter.syns.synonyms","quick,fast") + .putList("index.analysis.filter.syns.synonyms","quick,fast") .put("index.analysis.analyzer.syns.tokenizer","whitespace") .put("index.analysis.analyzer.syns.filter","syns") ) @@ -1572,9 +1572,9 @@ public void testMatchQueryWithSynonyms() throws IOException { .put("index.analysis.analyzer.index.filter", "lowercase") .put("index.analysis.analyzer.search.type", "custom") .put("index.analysis.analyzer.search.tokenizer", "standard") - .putArray("index.analysis.analyzer.search.filter", "lowercase", "synonym") + .putList("index.analysis.analyzer.search.filter", "lowercase", "synonym") .put("index.analysis.filter.synonym.type", "synonym") - .putArray("index.analysis.filter.synonym.synonyms", "fast, quick")); + .putList("index.analysis.filter.synonym.synonyms", "fast, quick")); assertAcked(builder.addMapping("test", "text", "type=text,analyzer=index,search_analyzer=search")); client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get(); @@ -1602,9 +1602,9 @@ public void testQueryStringWithSynonyms() throws IOException { .put("index.analysis.analyzer.index.filter", "lowercase") .put("index.analysis.analyzer.search.type", "custom") .put("index.analysis.analyzer.search.tokenizer", "standard") - .putArray("index.analysis.analyzer.search.filter", "lowercase", "synonym") + .putList("index.analysis.analyzer.search.filter", "lowercase", "synonym") .put("index.analysis.filter.synonym.type", "synonym") - .putArray("index.analysis.filter.synonym.synonyms", "fast, quick")); + .putList("index.analysis.filter.synonym.synonyms", "fast, quick")); assertAcked(builder.addMapping("test", "text", 
"type=text,analyzer=index,search_analyzer=search")); client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get(); @@ -1807,7 +1807,7 @@ public void testNGramCopyField() { .put("index.analysis.tokenizer.my_ngram_tokenizer.type", "nGram") .put("index.analysis.tokenizer.my_ngram_tokenizer.min_gram", "1") .put("index.analysis.tokenizer.my_ngram_tokenizer.max_gram", "10") - .putArray("index.analysis.tokenizer.my_ngram_tokenizer.token_chars", new String[0])); + .putList("index.analysis.tokenizer.my_ngram_tokenizer.token_chars", new String[0])); assertAcked(builder.addMapping("test", "origin", "type=text,copy_to=meta", "meta", "type=text,analyzer=my_ngram_analyzer")); // we only have ngrams as the index analyzer so searches will get standard analyzer diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 3cbee6adc4161..01b16bb9fb698 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -528,9 +528,9 @@ public void testThatSynonymsWork() throws Exception { Settings.Builder settingsBuilder = Settings.builder() .put("analysis.analyzer.suggest_analyzer_synonyms.type", "custom") .put("analysis.analyzer.suggest_analyzer_synonyms.tokenizer", "standard") - .putArray("analysis.analyzer.suggest_analyzer_synonyms.filter", "standard", "lowercase", "my_synonyms") + .putList("analysis.analyzer.suggest_analyzer_synonyms.filter", "standard", "lowercase", "my_synonyms") .put("analysis.filter.my_synonyms.type", "synonym") - .putArray("analysis.filter.my_synonyms.synonyms", "foo,renamed"); + .putList("analysis.filter.my_synonyms.synonyms", "foo,renamed"); completionMappingBuilder.searchAnalyzer("suggest_analyzer_synonyms").indexAnalyzer("suggest_analyzer_synonyms"); createIndexAndMappingAndSettings(settingsBuilder.build(), completionMappingBuilder); @@ -806,7 +806,7 @@ public void testThatSortingOnCompletionFieldReturnsUsefulException() throws Exce public void testThatSuggestStopFilterWorks() throws Exception { Settings.Builder settingsBuilder = Settings.builder() .put("index.analysis.analyzer.stoptest.tokenizer", "standard") - .putArray("index.analysis.analyzer.stoptest.filter", "standard", "suggest_stop_filter") + .putList("index.analysis.analyzer.stoptest.filter", "standard", "suggest_stop_filter") .put("index.analysis.filter.suggest_stop_filter.type", "stop") .put("index.analysis.filter.suggest_stop_filter.remove_trailing", false); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index 541cafc7962b2..b0b655b0f8b2a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.TemplateScript; @@ -173,7 +172,7 @@ public void testSuggestModes() throws IOException { .put(SETTING_NUMBER_OF_SHARDS, 1) .put(SETTING_NUMBER_OF_REPLICAS, 0) .put("index.analysis.analyzer.biword.tokenizer", 
"standard") - .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase") + .putList("index.analysis.analyzer.biword.filter", "shingler", "lowercase") .put("index.analysis.filter.shingler.type", "shingle") .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 3)); @@ -253,7 +252,7 @@ public void testUnmappedField() throws IOException, InterruptedException, Execut CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder() .put(indexSettings()) .put("index.analysis.analyzer.biword.tokenizer", "standard") - .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase") + .putList("index.analysis.analyzer.biword.filter", "shingler", "lowercase") .put("index.analysis.filter.shingler.type", "shingle") .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 3)); @@ -427,7 +426,7 @@ public void testStopwordsOnlyPhraseSuggest() throws IOException { assertAcked(prepareCreate("test").addMapping("typ1", "body", "type=text,analyzer=stopwd").setSettings( Settings.builder() .put("index.analysis.analyzer.stopwd.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.stopwd.filter", "stop") + .putList("index.analysis.analyzer.stopwd.filter", "stop") )); ensureGreen(); index("test", "typ1", "1", "body", "this is a test"); @@ -444,9 +443,9 @@ public void testPrefixLength() throws IOException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder() .put(SETTING_NUMBER_OF_SHARDS, 1) .put("index.analysis.analyzer.body.tokenizer", "standard") - .putArray("index.analysis.analyzer.body.filter", "lowercase") + .putList("index.analysis.analyzer.body.filter", "lowercase") .put("index.analysis.analyzer.bigram.tokenizer", "standard") - .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase") + .putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase") .put("index.analysis.filter.my_shingle.type", "shingle") .put("index.analysis.filter.my_shingle.output_unigrams", false) .put("index.analysis.filter.my_shingle.min_shingle_size", 2) @@ -482,9 +481,9 @@ public void testBasicPhraseSuggest() throws IOException, URISyntaxException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder() .put(indexSettings()) .put("index.analysis.analyzer.body.tokenizer", "standard") - .putArray("index.analysis.analyzer.body.filter", "lowercase") + .putList("index.analysis.analyzer.body.filter", "lowercase") .put("index.analysis.analyzer.bigram.tokenizer", "standard") - .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase") + .putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase") .put("index.analysis.filter.my_shingle.type", "shingle") .put("index.analysis.filter.my_shingle.output_unigrams", false) .put("index.analysis.filter.my_shingle.min_shingle_size", 2) @@ -615,9 +614,9 @@ public void testSizeParam() throws IOException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder() .put(SETTING_NUMBER_OF_SHARDS, 1) .put("index.analysis.analyzer.body.tokenizer", "standard") - .putArray("index.analysis.analyzer.body.filter", "lowercase") + .putList("index.analysis.analyzer.body.filter", "lowercase") .put("index.analysis.analyzer.bigram.tokenizer", "standard") - .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase") + 
.putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase") .put("index.analysis.filter.my_shingle.type", "shingle") .put("index.analysis.filter.my_shingle.output_unigrams", false) .put("index.analysis.filter.my_shingle.min_shingle_size", 2) @@ -685,7 +684,7 @@ public void testShardFailures() throws IOException, InterruptedException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder() .put(indexSettings()) .put("index.analysis.analyzer.suggest.tokenizer", "standard") - .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler") + .putList("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler") .put("index.analysis.filter.shingler.type", "shingle") .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 5) @@ -745,7 +744,7 @@ public void testEmptyShards() throws IOException, InterruptedException { assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(indexSettings()) .put("index.analysis.analyzer.suggest.tokenizer", "standard") - .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler") + .putList("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler") .put("index.analysis.filter.shingler.type", "shingle") .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 5) @@ -781,7 +780,7 @@ public void testSearchForRarePhrase() throws IOException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder() .put(indexSettings()) .put("index.analysis.analyzer.body.tokenizer", "standard") - .putArray("index.analysis.analyzer.body.filter", "lowercase", "my_shingle") + .putList("index.analysis.analyzer.body.filter", "lowercase", "my_shingle") .put("index.analysis.filter.my_shingle.type", "shingle") .put("index.analysis.filter.my_shingle.output_unigrams", true) .put("index.analysis.filter.my_shingle.min_shingle_size", 2) @@ -836,7 +835,7 @@ public void testSuggestWithManyCandidates() throws InterruptedException, Executi .put(indexSettings()) .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. .put("index.analysis.analyzer.text.tokenizer", "standard") - .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle") + .putList("index.analysis.analyzer.text.filter", "lowercase", "my_shingle") .put("index.analysis.filter.my_shingle.type", "shingle") .put("index.analysis.filter.my_shingle.output_unigrams", true) .put("index.analysis.filter.my_shingle.min_shingle_size", 2) @@ -1026,7 +1025,7 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE .put(indexSettings()) .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. 
.put("index.analysis.analyzer.text.tokenizer", "standard") - .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle") + .putList("index.analysis.analyzer.text.filter", "lowercase", "my_shingle") .put("index.analysis.filter.my_shingle.type", "shingle") .put("index.analysis.filter.my_shingle.output_unigrams", true) .put("index.analysis.filter.my_shingle.min_shingle_size", 2) diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 5883d3a5645cd..a5e92d89906cc 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1827,7 +1827,7 @@ public void testChangeSettingsOnRestore() throws Exception { .put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s") .put("index.analysis.analyzer.my_analyzer.type", "custom") .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard") - .putArray("index.analysis.analyzer.my_analyzer.filter", "lowercase", "my_synonym") + .putList("index.analysis.analyzer.my_analyzer.filter", "lowercase", "my_synonym") .put("index.analysis.filter.my_synonym.type", "synonym") .put("index.analysis.filter.my_synonym.synonyms", "foo => bar"); diff --git a/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index aa4c7415a4c45..8e0c039176207 100644 --- a/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -125,8 +125,8 @@ public void testGroupClusterIndices() throws IOException { transportService.start(); transportService.acceptIncomingRequests(); Settings.Builder builder = Settings.builder(); - builder.putArray("search.remote.cluster_1.seeds", seedNode.getAddress().toString()); - builder.putArray("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString()); + builder.putList("search.remote.cluster_1.seeds", seedNode.getAddress().toString()); + builder.putList("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString()); try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) { assertFalse(service.isCrossClusterSearchEnabled()); service.initializeRemoteClusters(); @@ -171,8 +171,8 @@ public void testIncrementallyAddClusters() throws IOException { transportService.start(); transportService.acceptIncomingRequests(); Settings.Builder builder = Settings.builder(); - builder.putArray("search.remote.cluster_1.seeds", seedNode.getAddress().toString()); - builder.putArray("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString()); + builder.putList("search.remote.cluster_1.seeds", seedNode.getAddress().toString()); + builder.putList("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString()); try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, transportService)) { assertFalse(service.isCrossClusterSearchEnabled()); service.initializeRemoteClusters(); @@ -225,9 +225,9 @@ public void testRemoteNodeAttribute() throws IOException, InterruptedException { transportService.start(); transportService.acceptIncomingRequests(); final Settings.Builder builder = Settings.builder(); - builder.putArray( + builder.putList( "search.remote.cluster_1.seeds", c1N1Node.getAddress().toString()); - 
builder.putArray( + builder.putList( "search.remote.cluster_2.seeds", c2N1Node.getAddress().toString()); try (RemoteClusterService service = new RemoteClusterService(settings, transportService)) { @@ -302,9 +302,9 @@ public void testCollectNodes() throws InterruptedException, IOException { transportService.start(); transportService.acceptIncomingRequests(); final Settings.Builder builder = Settings.builder(); - builder.putArray( + builder.putList( "search.remote.cluster_1.seeds", c1N1Node.getAddress().toString()); - builder.putArray( + builder.putList( "search.remote.cluster_2.seeds", c2N1Node.getAddress().toString()); try (RemoteClusterService service = new RemoteClusterService(settings, transportService)) { diff --git a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index 6f8cbb6a222d2..a87f428fec51e 100644 --- a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -180,9 +180,9 @@ public void testExplainMatchPhrasePrefix() { assertAcked(prepareCreate("test").setSettings( Settings.builder().put(indexSettings()) .put("index.analysis.filter.syns.type", "synonym") - .putArray("index.analysis.filter.syns.synonyms", "one,two") + .putList("index.analysis.filter.syns.synonyms", "one,two") .put("index.analysis.analyzer.syns.tokenizer", "standard") - .putArray("index.analysis.analyzer.syns.filter", "syns") + .putList("index.analysis.analyzer.syns.filter", "syns") ).addMapping("test", "field","type=text,analyzer=syns")); ensureGreen(); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java index ba815a8796bc2..be1f2495f0b23 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java @@ -29,6 +29,7 @@ import java.util.Arrays; import java.util.HashSet; +import java.util.List; import java.util.Set; /** @@ -53,10 +54,10 @@ public final class CJKBigramFilterFactory extends AbstractTokenFilterFactory { CJKBigramFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); outputUnigrams = settings.getAsBoolean("output_unigrams", false); - final String[] asArray = settings.getAsArray("ignored_scripts"); + final List<String> asArray = settings.getAsList("ignored_scripts"); Set<String> scripts = new HashSet<>(Arrays.asList("han", "hiragana", "katakana", "hangul")); if (asArray != null) { - scripts.removeAll(Arrays.asList(asArray)); + scripts.removeAll(asArray); } int flags = 0; for (String script : scripts) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java index 9ee889e3af610..760c1c79ba4cd 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java @@ -26,6 +26,7 @@ import org.elasticsearch.index.analysis.AbstractCharFilterFactory; import java.io.Reader; +import java.util.List; import 
java.util.Set; import static java.util.Collections.unmodifiableSet; @@ -36,8 +37,8 @@ public class HtmlStripCharFilterFactory extends AbstractCharFilterFactory { HtmlStripCharFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name); - String[] escapedTags = settings.getAsArray("escaped_tags"); - if (escapedTags.length > 0) { + List<String> escapedTags = settings.getAsList("escaped_tags"); + if (escapedTags.size() > 0) { this.escapedTags = unmodifiableSet(newHashSet(escapedTags)); } else { this.escapedTags = null; diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java index 4da560836eb13..0f94b521e4b7d 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java @@ -27,8 +27,8 @@ import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; -import java.util.Arrays; import java.util.HashSet; +import java.util.List; import java.util.Set; /** @@ -48,12 +48,12 @@ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory { KeepTypesFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); - final String[] arrayKeepTypes = settings.getAsArray(KEEP_TYPES_KEY, null); + final List<String> arrayKeepTypes = settings.getAsList(KEEP_TYPES_KEY, null); if ((arrayKeepTypes == null)) { throw new IllegalArgumentException("keep_types requires `" + KEEP_TYPES_KEY + "` to be configured"); } - this.keepTypes = new HashSet<>(Arrays.asList(arrayKeepTypes)); + this.keepTypes = new HashSet<>(arrayKeepTypes); } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java index 521e89b35e235..df67f24cc7f5f 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java @@ -22,7 +22,6 @@ import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.KeepWordFilter; -import org.apache.lucene.util.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -31,6 +30,8 @@ import org.elasticsearch.index.analysis.StopTokenFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; +import java.util.List; + /** * A {@link TokenFilterFactory} for {@link KeepWordFilter}. 
This filter only * keep tokens that are contained in the term set configured via @@ -61,7 +62,7 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory { KeepWordFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); - final String[] arrayKeepWords = settings.getAsArray(KEEP_WORDS_KEY, null); + final List<String> arrayKeepWords = settings.getAsList(KEEP_WORDS_KEY, null); final String keepWordsPath = settings.get(KEEP_WORDS_PATH_KEY, null); if ((arrayKeepWords == null && keepWordsPath == null) || (arrayKeepWords != null && keepWordsPath != null)) { // we don't allow both or none diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java index bf6315dd12193..7e69e44ffff24 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; +import java.util.List; import java.util.regex.Pattern; public class PatternCaptureGroupTokenFilterFactory extends AbstractTokenFilterFactory { @@ -37,13 +38,13 @@ public class PatternCaptureGroupTokenFilterFa PatternCaptureGroupTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); - String[] regexes = settings.getAsArray(PATTERNS_KEY, null, false); + List<String> regexes = settings.getAsList(PATTERNS_KEY, null, false); if (regexes == null) { throw new IllegalArgumentException("required setting '" + PATTERNS_KEY + "' is missing for token filter [" + name + "]"); } - patterns = new Pattern[regexes.length]; - for (int i = 0; i < regexes.size(); i++) { - patterns[i] = Pattern.compile(regexes[i]); + patterns = new Pattern[regexes.size()]; + for (int i = 0; i < regexes.size(); i++) { + patterns[i] = Pattern.compile(regexes.get(i)); } preserveOriginal = settings.getAsBoolean(PRESERVE_ORIG_KEY, true); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java index da9ab1090c174..8efc0d5941f9e 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java @@ -56,7 +56,7 @@ public void testDefault() throws IOException { public void testWithoutCommonWordsMatch() throws IOException { { Settings settings = Settings.builder().put("index.analysis.filter.common_grams_default.type", "common_grams") - .putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein") + .putList("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); @@ -75,7 +75,7 @@ public void testWithoutCommonWordsMatch() throws IOException { Settings settings = 
Settings.builder().put("index.analysis.filter.common_grams_default.type", "common_grams") .put("index.analysis.filter.common_grams_default.query_mode", false) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein") + .putList("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein") .build(); ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings); { @@ -94,7 +94,7 @@ public void testSettings() throws IOException { Settings settings = Settings.builder().put("index.analysis.filter.common_grams_1.type", "common_grams") .put("index.analysis.filter.common_grams_1.ignore_case", true) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are") + .putList("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are") .build(); ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_1"); @@ -109,7 +109,7 @@ public void testSettings() throws IOException { Settings settings = Settings.builder().put("index.analysis.filter.common_grams_2.type", "common_grams") .put("index.analysis.filter.common_grams_2.ignore_case", false) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") + .putList("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") .build(); ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_2"); @@ -122,7 +122,7 @@ public void testSettings() throws IOException { } { Settings settings = Settings.builder().put("index.analysis.filter.common_grams_3.type", "common_grams") - .putArray("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are") + .putList("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings); @@ -166,7 +166,7 @@ public void testQueryModeSettings() throws IOException { { Settings settings = Settings.builder().put("index.analysis.filter.common_grams_1.type", "common_grams") .put("index.analysis.filter.common_grams_1.query_mode", true) - .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are") + .putList("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are") .put("index.analysis.filter.common_grams_1.ignore_case", true) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); @@ -181,7 +181,7 @@ public void testQueryModeSettings() throws IOException { { Settings settings = Settings.builder().put("index.analysis.filter.common_grams_2.type", "common_grams") .put("index.analysis.filter.common_grams_2.query_mode", true) - .putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") + .putList("index.analysis.filter.common_grams_2.common_words", "the", "Or", 
"noT", "a", "is", "an", "they", "are") .put("index.analysis.filter.common_grams_2.ignore_case", false) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); @@ -196,7 +196,7 @@ public void testQueryModeSettings() throws IOException { { Settings settings = Settings.builder().put("index.analysis.filter.common_grams_3.type", "common_grams") .put("index.analysis.filter.common_grams_3.query_mode", true) - .putArray("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") + .putList("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings); @@ -210,7 +210,7 @@ public void testQueryModeSettings() throws IOException { { Settings settings = Settings.builder().put("index.analysis.filter.common_grams_4.type", "common_grams") .put("index.analysis.filter.common_grams_4.query_mode", true) - .putArray("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are") + .putList("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java index bb1f2a55f7cb4..6b4682d04a128 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -71,7 +71,7 @@ public void testNgramHighlightingWithBrokenPositions() throws IOException { .put("analysis.tokenizer.autocomplete.token_chars", "letter,digit") .put("analysis.tokenizer.autocomplete.type", "nGram") .put("analysis.filter.wordDelimiter.type", "word_delimiter") - .putArray("analysis.filter.wordDelimiter.type_table", + .putList("analysis.filter.wordDelimiter.type_table", "& => ALPHANUM", "| => ALPHANUM", "! => ALPHANUM", "? => ALPHANUM", ". 
=> ALPHANUM", "- => ALPHANUM", "# => ALPHANUM", "% => ALPHANUM", "+ => ALPHANUM", @@ -88,10 +88,10 @@ public void testNgramHighlightingWithBrokenPositions() throws IOException { .put("analysis.filter.wordDelimiter.catenate_all", false) .put("analysis.analyzer.autocomplete.tokenizer", "autocomplete") - .putArray("analysis.analyzer.autocomplete.filter", + .putList("analysis.analyzer.autocomplete.filter", "lowercase", "wordDelimiter") .put("analysis.analyzer.search_autocomplete.tokenizer", "whitespace") - .putArray("analysis.analyzer.search_autocomplete.filter", + .putList("analysis.analyzer.search_autocomplete.filter", "lowercase", "wordDelimiter"))); client().prepareIndex("test", "test", "1") .setSource("name", "ARCOTEL Hotels Deutschland").get(); @@ -121,7 +121,7 @@ public void testMultiPhraseCutoff() throws IOException { .put("analysis.filter.wordDelimiter.catenate_numbers", true) .put("analysis.filter.wordDelimiter.catenate_all", false) .put("analysis.analyzer.custom_analyzer.tokenizer", "whitespace") - .putArray("analysis.analyzer.custom_analyzer.filter", + .putList("analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter")) ); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java index a7b3c6e61f006..e9248c3d21289 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java @@ -76,7 +76,7 @@ public void testKeepWordsPathSettings() { } settings = Settings.builder().put(settings) - .putArray("index.analysis.filter.non_broken_keep_filter.keep_words", "test") + .putList("index.analysis.filter.non_broken_keep_filter.keep_words", "test") .build(); try { // test our none existing setup is picked up diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java index 4df1fb780e932..a19882d6faa00 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java @@ -38,7 +38,7 @@ public void testKeepTypes() throws IOException { Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.keep_numbers.type", "keep_types") - .putArray("index.analysis.filter.keep_numbers.types", new String[] {"", ""}) + .putList("index.analysis.filter.keep_numbers.types", new String[] {"", ""}) .build(); ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers"); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java index 081580a6ae93a..f454e8c776c12 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java @@ -42,9 +42,9 @@ public void 
testCreateIndexWithMassiveWordList() { .put("index.number_of_shards", 1) .put("analysis.analyzer.test_analyzer.type", "custom") .put("analysis.analyzer.test_analyzer.tokenizer", "standard") - .putArray("analysis.analyzer.test_analyzer.filter", "dictionary_decompounder", "lowercase") + .putList("analysis.analyzer.test_analyzer.filter", "dictionary_decompounder", "lowercase") .put("analysis.filter.dictionary_decompounder.type", "dictionary_decompounder") - .putArray("analysis.filter.dictionary_decompounder.word_list", wordList) + .putList("analysis.filter.dictionary_decompounder.word_list", wordList) ).get(); } } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java index 24efd89b7e0c8..3f4641c7c189b 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java @@ -78,7 +78,7 @@ public void testNoTokenChars() throws IOException { final String name = "ngr"; final Settings indexSettings = newAnalysisSettingsBuilder().build(); final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 4) - .putArray("token_chars", new String[0]).build(); + .putList("token_chars", new String[0]).build(); Tokenizer tokenizer = new NGramTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings) .create(); tokenizer.setReader(new StringReader("1.34")); diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java index 14fa5922c1d90..fa1999cf17e39 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java @@ -37,6 +37,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -63,7 +64,7 @@ private ICUTokenizerConfig getIcuConfig(Environment env, Settings settings) { Map tailored = new HashMap<>(); try { - String[] ruleFiles = settings.getAsArray(RULE_FILES); + List ruleFiles = settings.getAsList(RULE_FILES); for (String scriptAndResourcePath : ruleFiles) { int colonPos = scriptAndResourcePath.indexOf(":"); diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java index ff4ab4943e30f..52dabef7c5dff 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.analysis; -import java.util.Arrays; import java.util.HashSet; +import java.util.List; import org.apache.commons.codec.Encoder; import org.apache.commons.codec.language.Caverphone1; @@ -50,7 +50,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory { private final Encoder encoder; private final boolean replace; private int maxcodelength; - private String[] languageset; + 
private List languageset; private NameType nametype; private RuleType ruletype; @@ -82,7 +82,7 @@ public PhoneticTokenFilterFactory(IndexSettings indexSettings, Environment envir this.maxcodelength = settings.getAsInt("max_code_len", 4); } else if ("bm".equalsIgnoreCase(encodername) || "beider_morse".equalsIgnoreCase(encodername) || "beidermorse".equalsIgnoreCase(encodername)) { this.encoder = null; - this.languageset = settings.getAsArray("languageset"); + this.languageset = settings.getAsList("languageset"); String ruleType = settings.get("rule_type", "approx"); if ("approx".equalsIgnoreCase(ruleType)) { ruletype = RuleType.APPROX; @@ -117,7 +117,7 @@ public TokenStream create(TokenStream tokenStream) { if (encoder == null) { if (ruletype != null && nametype != null) { if (languageset != null) { - final LanguageSet languages = LanguageSet.from(new HashSet<>(Arrays.asList(languageset))); + final LanguageSet languages = LanguageSet.from(new HashSet<>(languageset)); return new BeiderMorseFilter(tokenStream, new PhoneticEngine(nametype, ruletype, true), languages); } return new BeiderMorseFilter(tokenStream, new PhoneticEngine(nametype, ruletype, true)); diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index f3685278dc6b9..e7986cb878e41 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -229,7 +229,7 @@ public void testFilterByTags() throws InterruptedException { public void testFilterByMultipleTags() throws InterruptedException { int nodes = randomIntBetween(5, 10); Settings nodeSettings = Settings.builder() - .putArray(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod", "preprod") + .putList(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod", "preprod") .build(); int prodInstances = 0; diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java index 5ae30c74a3226..31ea9bdb1c21e 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java @@ -128,7 +128,7 @@ public void testNodesWithDifferentTagsAndOneTagSet() { Settings nodeSettings = Settings.builder() .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") - .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch") + .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch") .build(); mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); @@ -140,7 +140,7 @@ public void testNodesWithDifferentTagsAndTwoTagSet() { Settings nodeSettings = Settings.builder() .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") - .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") + .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") .build(); mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); @@ -162,7 +162,7 @@ public void 
testNodesWithSameTagsAndOneTagSet() { Settings nodeSettings = Settings.builder() .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") - .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch") + .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch") .build(); mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); @@ -173,7 +173,7 @@ public void testNodesWithSameTagsAndTwoTagsSet() { Settings nodeSettings = Settings.builder() .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") - .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") + .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") .build(); mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); @@ -183,7 +183,7 @@ public void testNodesWithSameTagsAndTwoTagsSet() { public void testMultipleZonesAndTwoNodesInSameZone() { Settings nodeSettings = Settings.builder() .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) - .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") + .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); @@ -193,7 +193,7 @@ public void testMultipleZonesAndTwoNodesInSameZone() { public void testMultipleZonesAndTwoNodesInDifferentZones() { Settings nodeSettings = Settings.builder() .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) - .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") + .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); @@ -206,7 +206,7 @@ public void testMultipleZonesAndTwoNodesInDifferentZones() { public void testZeroNode43() { Settings nodeSettings = Settings.builder() .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) - .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b") + .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); @@ -226,7 +226,7 @@ public void testIllegalSettingsMissingAllRequired() { public void testIllegalSettingsMissingProject() { Settings nodeSettings = Settings.builder() - .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b") + .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); try { @@ -258,7 +258,7 @@ public void testIllegalSettingsMissingZone() { public void testNoRegionReturnsEmptyList() { Settings nodeSettings = Settings.builder() .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) - .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a") + .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a") .build(); mock = new GceInstancesServiceMock(nodeSettings); List 
discoveryNodes = buildDynamicNodes(mock, nodeSettings); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java index a2afbccf27a53..6d609bd08d2c6 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java @@ -39,7 +39,7 @@ public class AzureRepositorySettingsTests extends ESTestCase { private AzureRepository azureRepository(Settings settings) throws StorageException, IOException, URISyntaxException { Settings internalSettings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) - .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()) + .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()) .put(settings) .build(); return new AzureRepository(new RepositoryMetaData("foo", "azure", internalSettings), new Environment(internalSettings), diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java index 50a9f3426acbd..9ba59f8d49727 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java @@ -80,7 +80,7 @@ public void testEnvironmentPaths() throws Exception { Settings.Builder settingsBuilder = Settings.builder(); settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.resolve("home").toString()); - settingsBuilder.putArray(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(), + settingsBuilder.putList(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(), esHome.resolve("data2").toString()); settingsBuilder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), esHome.resolve("custom").toString()); settingsBuilder.put(Environment.PATH_LOGS_SETTING.getKey(), esHome.resolve("logs").toString()); @@ -153,7 +153,7 @@ public void testDuplicateDataPaths() throws IOException { Settings .builder() .put(Environment.PATH_HOME_SETTING.getKey(), home.toString()) - .putArray(Environment.PATH_DATA_SETTING.getKey(), data.toString(), duplicate.toString()) + .putList(Environment.PATH_DATA_SETTING.getKey(), data.toString(), duplicate.toString()) .build(); final Environment environment = new Environment(settings); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java index 3eebf4a2f6481..8192a8c8a29c5 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java @@ -50,7 +50,7 @@ public void testMissingWritePermission() throws IOException { PosixFilePermission.OWNER_READ))); Settings build = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) - .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build(); + .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build(); IOException ioException = expectThrows(IOException.class, () -> { new NodeEnvironment(build, new Environment(build)); }); @@ -70,7 +70,7 @@ public void 
testMissingWritePermissionOnIndex() throws IOException { PosixFilePermission.OWNER_READ))); Settings build = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) - .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build(); + .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build(); IOException ioException = expectThrows(IOException.class, () -> { new NodeEnvironment(build, new Environment(build)); }); @@ -95,7 +95,7 @@ public void testMissingWritePermissionOnShard() throws IOException { PosixFilePermission.OWNER_READ))); Settings build = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) - .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build(); + .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build(); IOException ioException = expectThrows(IOException.class, () -> { new NodeEnvironment(build, new Environment(build)); }); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index a0777de5dc32e..243ab11e61fcb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -39,7 +39,6 @@ import org.apache.logging.log4j.status.StatusLogger; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.apache.lucene.util.SetOnce; import org.apache.lucene.util.TestRuleMarkFailure; import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TimeUnits; @@ -134,7 +133,6 @@ import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BooleanSupplier; import java.util.function.Consumer; @@ -812,7 +810,7 @@ public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException Settings build = Settings.builder() .put(settings) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) - .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); + .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); return new NodeEnvironment(build, new Environment(build)); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java index 7e3f9a21e4386..f873ec4fb933c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java @@ -129,7 +129,7 @@ public Settings nodeSettings(int nodeOrdinal) { unicastHosts[i] = IP_ADDR + ":" + (unicastHostPorts[unicastHostOrdinals[i]]); } } - builder.putArray("discovery.zen.ping.unicast.hosts", unicastHosts); + builder.putList("discovery.zen.ping.unicast.hosts", unicastHosts); return builder.put(super.nodeSettings(nodeOrdinal)).build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index da43f116d4245..504097b4b6e7a 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -2455,8 +2455,8 @@ public void testTransportProfilesWithPortAndHost() { .put("transport.profiles.some_profile.port", "8900-9000") .put("transport.profiles.some_profile.bind_host", "_local:ipv4_") .put("transport.profiles.some_other_profile.port", "8700-8800") - .putArray("transport.profiles.some_other_profile.bind_host", hosts) - .putArray("transport.profiles.some_other_profile.publish_host", "_local:ipv4_") + .putList("transport.profiles.some_other_profile.bind_host", hosts) + .putList("transport.profiles.some_other_profile.publish_host", "_local:ipv4_") .build(), version0, null, true)) { serviceC.start(); From a4436195f8f05627cf71ffd4657cad4f5c84492a Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 9 Oct 2017 10:45:03 +0200 Subject: [PATCH 07/10] Set minimum_master_nodes on rolling-upgrade test (#26911) The rolling-upgrade test was only writing the "minimum_master_nodes" setting to the configuration file of the old nodes, but not the upgraded ones. Also changes the value of "minimum_master_nodes" from "number_of_nodes" to "(number_of_nodes / 2) + 1". --- .../gradle/test/ClusterConfiguration.groovy | 8 +++----- .../gradle/test/ClusterFormationTasks.groovy | 15 ++++++++------- qa/rolling-upgrade/build.gradle | 2 ++ 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index ab618a0fdc7f7..af84a44233aa3 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -63,13 +63,11 @@ class ClusterConfiguration { boolean debug = false /** - * if true each node will be configured with discovery.zen.minimum_master_nodes set - * to the total number of nodes in the cluster. This will also cause that each node has `0s` state recovery - * timeout which can lead to issues if for instance an existing clusterstate is expected to be recovered - * before any tests start + * Configuration of the setting discovery.zen.minimum_master_nodes on the nodes. + * In case of more than one node, this defaults to (number of nodes / 2) + 1 */ @Input - boolean useMinimumMasterNodes = true + Closure minimumMasterNodes = { getNumNodes() > 1 ? getNumNodes().intdiv(2) + 1 : -1 } @Input String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') + diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 217ecb4ed90fb..14074f241df91 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -311,13 +311,14 @@ class ClusterFormationTasks { // Define a node attribute so we can test that it exists 'node.attr.testattr' : 'test' ] - // we set min master nodes to the total number of nodes in the cluster and - // basically skip initial state recovery to allow the cluster to form using a realistic master election - // this means all nodes must be up, join the seed node and do a master election. 
This will also allow new and - // old nodes in the BWC case to become the master - if (node.config.useMinimumMasterNodes && node.config.numNodes > 1) { - esConfig['discovery.zen.minimum_master_nodes'] = node.config.numNodes - esConfig['discovery.initial_state_timeout'] = '0s' // don't wait for state.. just start up quickly + int minimumMasterNodes = node.config.minimumMasterNodes.call() + if (minimumMasterNodes > 0) { + esConfig['discovery.zen.minimum_master_nodes'] = minimumMasterNodes + } + if (node.config.numNodes > 1) { + // don't wait for state.. just start up quickly + // this will also allow new and old nodes in the BWC case to become the master + esConfig['discovery.initial_state_timeout'] = '0s' } esConfig['node.max_local_storage_nodes'] = node.config.numNodes esConfig['http.port'] = node.config.httpPort diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index b5f841601308e..fc3cf88b272f1 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -61,6 +61,7 @@ for (Version version : wireCompatVersions) { distribution = 'zip' clusterName = 'rolling-upgrade' unicastTransportUri = { seedNode, node, ant -> oldClusterTest.nodes.get(0).transportUri() } + minimumMasterNodes = { 2 } /* Override the data directory so the new node always gets the node we * just stopped's data directory. */ dataDir = { nodeNumber -> oldClusterTest.nodes[1].dataDir } @@ -81,6 +82,7 @@ for (Version version : wireCompatVersions) { distribution = 'zip' clusterName = 'rolling-upgrade' unicastTransportUri = { seedNode, node, ant -> mixedClusterTest.nodes.get(0).transportUri() } + minimumMasterNodes = { 2 } /* Override the data directory so the new node always gets the node we * just stopped's data directory. */ dataDir = { nodeNumber -> oldClusterTest.nodes[0].dataDir} From 19dc629e6d78520ec7ee06ca903584bf57f20afb Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 9 Oct 2017 13:20:42 +0200 Subject: [PATCH 08/10] Test query builder bwc against previous supported versions instead of just the current version. 
Relates to #25456 --- qa/query-builder-bwc/build.gradle | 14 ++++---------- .../org/elasticsearch/bwc/QueryBuilderBWCIT.java | 10 ++++++++++ 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/qa/query-builder-bwc/build.gradle b/qa/query-builder-bwc/build.gradle index dbc438f673875..f1e7ad6f640f0 100644 --- a/qa/query-builder-bwc/build.gradle +++ b/qa/query-builder-bwc/build.gradle @@ -30,12 +30,7 @@ task bwcTest { group = 'verification' } -// For now test against the current version: -Version currentVersion = Version.fromString(VersionProperties.elasticsearch.minus('-SNAPSHOT')) -Version[] versions = [currentVersion] -// TODO: uncomment when there is a released version with: https://github.com/elastic/elasticsearch/pull/25456 -// versions = indexCompatVersions -for (Version version : versions) { +for (Version version : indexCompatVersions) { String baseName = "v${version}" Task oldQueryBuilderTest = tasks.create(name: "${baseName}#oldQueryBuilderTest", type: RestIntegTestTask) { @@ -48,9 +43,8 @@ for (Version version : versions) { configure(extensions.findByName("${baseName}#oldQueryBuilderTestCluster")) { distribution = 'zip' - // TODO: uncomment when there is a released version with: https://github.com/elastic/elasticsearch/pull/25456 - // bwcVersion = version - // numBwcNodes = 1 + bwcVersion = version + numBwcNodes = 1 numNodes = 1 clusterName = 'query_builder_bwc' setting 'http.content_type.required', 'true' @@ -89,7 +83,7 @@ test.enabled = false // no unit tests for rolling upgrades, only the rest integr // basic integ tests includes testing bwc against the most recent version task integTest { if (project.bwc_tests_enabled) { - dependsOn = ["v${versions[-1]}#bwcTest"] + dependsOn = ["v${indexCompatVersions[-1]}#bwcTest"] } } diff --git a/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java b/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java index ff45e6212d7e9..bff28d6f375bf 100644 --- a/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java +++ b/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java @@ -61,6 +61,16 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +/** + * An integration test that tests whether percolator queries stored in older supported ES version can still be read by the + * current ES version. Percolator queries are stored in the binary format in a dedicated doc values field (see + * PercolatorFieldMapper#createQueryBuilderField(...) method). Using the query builders writable contract. This test + * does best effort verifying that we don't break bwc for query builders between the first previous major version and + * the latest current major release. + * + * The queries to test are specified in json format, which turns out to work because we tend break here rarely. If the + * json format of a query being tested here then feel free to change this. + */ public class QueryBuilderBWCIT extends ESRestTestCase { private static final List CANDIDATES = new ArrayList<>(); From 1d4f70210ff85193ffd733484c678f86491c653e Mon Sep 17 00:00:00 2001 From: kel Date: Mon, 9 Oct 2017 07:02:38 -0500 Subject: [PATCH 09/10] Calculate and cache result when advanceExact is called (#26920) Cache final result instead of result of advanceExact. 
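As a self-contained illustration of that caching pattern (every name below is invented for the sketch and is not taken from the codebase), the selected value is computed once in advanceExact and stored, so the value getter can safely be called any number of times per document:

import java.io.IOException;

/** Hypothetical per-document value source, standing in for a Lucene doc-values iterator. */
interface DoubleSource {
    boolean advanceExact(int doc) throws IOException; // true if this document has a value
    double doubleValue() throws IOException;          // value for the current document
}

/** Mirrors the commit: cache the final result in advanceExact instead of only a hasValue flag. */
final class MissingValueSubstitutingSource implements DoubleSource {
    private final DoubleSource in;
    private final double missingValue;
    private double value; // cached result for the current document

    MissingValueSubstitutingSource(DoubleSource in, double missingValue) {
        this.in = in;
        this.missingValue = missingValue;
    }

    @Override
    public boolean advanceExact(int doc) throws IOException {
        // compute once per document; repeated doubleValue() calls return the cached result
        value = in.advanceExact(doc) ? in.doubleValue() : missingValue;
        return true; // every document now reports a value (possibly the missing-value substitute)
    }

    @Override
    public double doubleValue() {
        return value;
    }
}

The test changes in this commit verify exactly that property by invoking the value getters repeatedly and asserting that each call returns the same result.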
Fix SortedNumericDoubleValues does not test MEDIAN mode Replace deprecated random string generation method --- .../elasticsearch/search/MultiValueMode.java | 32 +++++----- .../search/MultiValueModeTests.java | 63 +++++++++++++++---- 2 files changed, 66 insertions(+), 29 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java index 2d85c379f54fd..b2ee4b8ffbd5f 100644 --- a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -416,11 +416,11 @@ public NumericDocValues select(final SortedNumericDocValues values, final long m if (singleton != null) { return new AbstractNumericDocValues() { - private boolean hasValue; + private long value; @Override public boolean advanceExact(int target) throws IOException { - hasValue = singleton.advanceExact(target); + this.value = singleton.advanceExact(target) ? singleton.longValue() : missingValue; return true; } @@ -431,17 +431,17 @@ public int docID() { @Override public long longValue() throws IOException { - return hasValue ? singleton.longValue() : missingValue; + return this.value; } }; } else { return new AbstractNumericDocValues() { - private boolean hasValue; + private long value; @Override public boolean advanceExact(int target) throws IOException { - hasValue = values.advanceExact(target); + this.value = values.advanceExact(target) ? pick(values) : missingValue; return true; } @@ -452,7 +452,7 @@ public int docID() { @Override public long longValue() throws IOException { - return hasValue ? pick(values) : missingValue; + return value; } }; } @@ -533,35 +533,33 @@ public NumericDoubleValues select(final SortedNumericDoubleValues values, final final NumericDoubleValues singleton = FieldData.unwrapSingleton(values); if (singleton != null) { return new NumericDoubleValues() { - - private boolean hasValue; + private double value; @Override public boolean advanceExact(int doc) throws IOException { - hasValue = singleton.advanceExact(doc); + this.value = singleton.advanceExact(doc) ? singleton.doubleValue() : missingValue; return true; } @Override public double doubleValue() throws IOException { - return hasValue ? singleton.doubleValue() : missingValue; + return this.value; } - }; } else { return new NumericDoubleValues() { - private boolean hasValue; + private double value; @Override public boolean advanceExact(int target) throws IOException { - hasValue = values.advanceExact(target); + value = values.advanceExact(target) ? pick(values) : missingValue; return true; } @Override public double doubleValue() throws IOException { - return hasValue ? pick(values) : missingValue; + return this.value; } }; } @@ -638,17 +636,17 @@ public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef } return new AbstractBinaryDocValues() { - private boolean hasValue; + private BytesRef value; @Override public boolean advanceExact(int target) throws IOException { - hasValue = singleton.advanceExact(target); + this.value = singleton.advanceExact(target) ? singleton.binaryValue() : missingValue; return true; } @Override public BytesRef binaryValue() throws IOException { - return hasValue ? 
singleton.binaryValue() : missingValue; + return this.value; } }; } else { diff --git a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index df18b00528c66..d9eb45013263d 100644 --- a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.search; -import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.NumericDocValues; @@ -160,6 +158,8 @@ private void verifySortedNumeric(Supplier supplier, int for (int i = 0; i < maxDoc; ++i) { assertTrue(selected.advanceExact(i)); final long actual = selected.longValue(); + verifyLongValueCanCalledMoreThanOnce(selected, actual); + long expected = 0; if (values.advanceExact(i) == false) { expected = missingValue; @@ -203,6 +203,12 @@ private void verifySortedNumeric(Supplier supplier, int } } + private void verifyLongValueCanCalledMoreThanOnce(NumericDocValues values, long expected) throws IOException { + for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) { + assertEquals(expected, values.longValue()); + } + } + private void verifySortedNumeric(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) { @@ -212,6 +218,8 @@ private void verifySortedNumeric(Supplier supplier, int for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? 
rootDocs.nextSetBit(root + 1) : -1) { assertTrue(selected.advanceExact(root)); final long actual = selected.longValue(); + verifyLongValueCanCalledMoreThanOnce(selected, actual); + long expected = 0; if (mode == MultiValueMode.MAX) { expected = Long.MIN_VALUE; @@ -320,14 +328,13 @@ public int docValueCount() { private void verifySortedNumericDouble(Supplier supplier, int maxDoc) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : MultiValueMode.values()) { - if (MultiValueMode.MEDIAN.equals(mode)) { - continue; - } SortedNumericDoubleValues values = supplier.get(); final NumericDoubleValues selected = mode.select(values, missingValue); for (int i = 0; i < maxDoc; ++i) { assertTrue(selected.advanceExact(i)); final double actual = selected.doubleValue(); + verifyDoubleValueCanCalledMoreThanOnce(selected, actual); + double expected = 0.0; if (values.advanceExact(i) == false) { expected = missingValue; @@ -371,6 +378,12 @@ private void verifySortedNumericDouble(Supplier suppl } } + private void verifyDoubleValueCanCalledMoreThanOnce(NumericDoubleValues values, double expected) throws IOException { + for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) { + assertTrue(Double.compare(values.doubleValue(), expected) == 0); + } + } + private void verifySortedNumericDouble(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) { @@ -379,7 +392,9 @@ private void verifySortedNumericDouble(Supplier suppl int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) { assertTrue(selected.advanceExact(root)); - final double actual = selected.doubleValue();; + final double actual = selected.doubleValue(); + verifyDoubleValueCanCalledMoreThanOnce(selected, actual); + double expected = 0.0; if (mode == MultiValueMode.MAX) { expected = Long.MIN_VALUE; @@ -421,7 +436,7 @@ public void testSingleValuedStrings() throws Exception { final FixedBitSet docsWithValue = randomBoolean() ? 
null : new FixedBitSet(numDocs); for (int i = 0; i < array.length; ++i) { if (randomBoolean()) { - array[i] = new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)); + array[i] = new BytesRef(randomAlphaOfLengthBetween(8, 8)); if (docsWithValue != null) { docsWithValue.set(i); } @@ -456,7 +471,7 @@ public void testMultiValuedStrings() throws Exception { for (int i = 0; i < numDocs; ++i) { final BytesRef[] values = new BytesRef[randomInt(4)]; for (int j = 0; j < values.length; ++j) { - values[j] = new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)); + values[j] = new BytesRef(randomAlphaOfLengthBetween(8, 8)); } Arrays.sort(values); array[i] = values; @@ -489,13 +504,15 @@ public int docValueCount() { } private void verifySortedBinary(Supplier supplier, int maxDoc) throws IOException { - for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)) }) { + for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(randomAlphaOfLengthBetween(8, 8)) }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) { SortedBinaryDocValues values = supplier.get(); final BinaryDocValues selected = mode.select(values, missingValue); for (int i = 0; i < maxDoc; ++i) { assertTrue(selected.advanceExact(i)); final BytesRef actual = selected.binaryValue(); + verifyBinaryValueCanCalledMoreThanOnce(selected, actual); + BytesRef expected = null; if (values.advanceExact(i) == false) { expected = missingValue; @@ -524,8 +541,14 @@ private void verifySortedBinary(Supplier supplier, int ma } } + private void verifyBinaryValueCanCalledMoreThanOnce(BinaryDocValues values, BytesRef expected) throws IOException { + for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) { + assertEquals(values.binaryValue(), expected); + } + } + private void verifySortedBinary(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { - for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)) }) { + for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(randomAlphaOfLengthBetween(8, 8)) }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) { SortedBinaryDocValues values = supplier.get(); final BinaryDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc); @@ -533,6 +556,8 @@ private void verifySortedBinary(Supplier supplier, int ma for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) { assertTrue(selected.advanceExact(root)); final BytesRef actual = selected.binaryValue(); + verifyBinaryValueCanCalledMoreThanOnce(selected, actual); + BytesRef expected = null; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { if (values.advanceExact(child)) { @@ -658,7 +683,11 @@ private void verifySortedSet(Supplier supplier, int maxDoc) SortedSetDocValues values = supplier.get(); final SortedDocValues selected = mode.select(values); for (int i = 0; i < maxDoc; ++i) { - final long actual = selected.advanceExact(i) ? 
selected.ordValue() : -1; + long actual = -1; + if (selected.advanceExact(i)) { + actual = selected.ordValue(); + verifyOrdValueCanCalledMoreThanOnce(selected, selected.ordValue()); + } int expected = -1; if (values.advanceExact(i)) { for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { @@ -679,13 +708,23 @@ private void verifySortedSet(Supplier supplier, int maxDoc) } } + private void verifyOrdValueCanCalledMoreThanOnce(SortedDocValues values, long expected) throws IOException { + for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) { + assertEquals(values.ordValue(), expected); + } + } + private void verifySortedSet(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) { SortedSetDocValues values = supplier.get(); final SortedDocValues selected = mode.select(values, rootDocs, new BitSetIterator(innerDocs, 0L)); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) { - final int actual = selected.advanceExact(root) ? selected.ordValue() : -1; + int actual = -1; + if (selected.advanceExact(root)) { + actual = selected.ordValue(); + verifyOrdValueCanCalledMoreThanOnce(selected, actual); + } int expected = -1; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { if (values.advanceExact(child)) { From 96823b0480decdd3e7276d85cf92148151c81616 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 9 Oct 2017 15:26:44 +0200 Subject: [PATCH 10/10] update Lucene version for 6.0-RC2 version --- core/src/main/java/org/elasticsearch/Version.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 30d6c142100d6..5839251b6b728 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -115,7 +115,7 @@ public class Version implements Comparable { new Version(V_6_0_0_rc1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); public static final int V_6_0_0_rc2_ID = 6000052; public static final Version V_6_0_0_rc2 = - new Version(V_6_0_0_rc2_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); + new Version(V_6_0_0_rc2_ID, org.apache.lucene.util.Version.LUCENE_7_0_1); public static final int V_6_1_0_ID = 6010099; public static final Version V_6_1_0 = new Version(V_6_1_0_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
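For reference on the putArray-to-putList migration in the earlier hunks: a minimal usage sketch of the list-based Settings accessors, assuming only that org.elasticsearch.common.settings.Settings from this tree is on the classpath (the setting key is invented for the example):

import java.util.List;

import org.elasticsearch.common.settings.Settings;

public class PutListExample {
    public static void main(String[] args) {
        // Build settings with a list-valued key instead of a String[] array.
        Settings settings = Settings.builder()
                .putList("example.filter.keep_words", "one", "two", "three")
                .build();

        // Read the value back as a list; call sites no longer need Arrays.asList(...) around a String[].
        List<String> keepWords = settings.getAsList("example.filter.keep_words");
        System.out.println(keepWords); // prints [one, two, three]
    }
}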
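Similarly, the quorum default introduced for the rolling-upgrade clusters in PATCH 07 boils down to integer division; a tiny Java rendering of the Groovy closure's logic (class and method names invented for the sketch):

final class MinimumMasterNodes {
    /** Quorum default computed by the new closure: (numNodes / 2) + 1, or -1 for a single-node cluster. */
    static int defaultFor(int numNodes) {
        return numNodes > 1 ? numNodes / 2 + 1 : -1;
    }

    public static void main(String[] args) {
        // 2 nodes -> 2, 3 nodes -> 2, 5 nodes -> 3; with a single node the setting is left unset
        System.out.println(defaultFor(2) + " " + defaultFor(3) + " " + defaultFor(5) + " " + defaultFor(1));
    }
}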